[ 577.638865] env[68673]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=68673) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 577.639278] env[68673]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=68673) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 577.639372] env[68673]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=68673) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 577.639713] env[68673]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 577.728137] env[68673]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68673) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 577.738253] env[68673]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68673) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 577.879963] env[68673]: INFO nova.virt.driver [None req-556c1bb6-eae8-4db5-9dde-db0df2fe52f9 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 577.953023] env[68673]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 577.953279] env[68673]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 577.953383] env[68673]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68673) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 581.156583] env[68673]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-5be70f7a-a5ae-4c6d-929f-77039a2231a2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.172602] env[68673]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68673) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 581.172730] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-9200ff57-27fd-4e8f-bb94-26acf4585ff8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.217246] env[68673]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 805b0.
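
The sequence above (named lock, suds client creation, ServiceInstance.RetrieveServiceContent, SessionManager.Login) is oslo.vmware's standard session bootstrap, driven here by nova's VMwareVCDriver. A minimal sketch of the same call, assuming placeholder credentials; only the host comes from this log, the user name and password below are not:

    # Hedged sketch: build a vCenter session the way the driver does.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vc1.osci.c.eu-de-1.cloud.sap',
        port=443,
        server_username='nova-user',      # assumption, not from this log
        server_password='********',       # assumption, not from this log
        api_retry_count=10,
        task_poll_interval=0.5,
    )
    # The constructor runs _create_session(), i.e. the suds client setup,
    # RetrieveServiceContent and SessionManager.Login entries seen above.
    print(session.vim.service_content.about.version)  # e.g. '7.0.3'
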
[ 581.217405] env[68673]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.264s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 581.217941] env[68673]: INFO nova.virt.vmwareapi.driver [None req-556c1bb6-eae8-4db5-9dde-db0df2fe52f9 None None] VMware vCenter version: 7.0.3
[ 581.221368] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c37e78e-be67-4aeb-bfb7-6ed5278e4cee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.239423] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82d155e-7a8d-498b-a161-53969aafc127 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.245495] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f1fc51-bdfc-4f83-b958-3b80ac537cfd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.252060] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25dc2098-c284-4118-a859-acdfe65c75cd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.265173] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e765fa-b29b-48e7-82a6-8eb2488ed6ae {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.271292] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4be4c83-8c75-4d2f-8742-ef58504c6d0d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.301826] env[68673]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-64147ca9-9a1f-409c-8c27-8ca203a27d4b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.307114] env[68673]: DEBUG nova.virt.vmwareapi.driver [None req-556c1bb6-eae8-4db5-9dde-db0df2fe52f9 None None] Extension org.openstack.compute already exists. {{(pid=68673) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 581.309730] env[68673]: INFO nova.compute.provider_config [None req-556c1bb6-eae8-4db5-9dde-db0df2fe52f9 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
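
The Acquiring / acquired / "released" triplet logged around "oslo_vmware_api_lock" (lockutils.py:402/407/421) is what oslo.concurrency emits for any named lock. A minimal sketch of the pattern that produces it; the guarded body here is illustrative:

    # Hedged sketch of the oslo.concurrency usage behind the lock lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        pass  # rebuild the vCenter session; one caller at a time

    # Equivalent inline form:
    with lockutils.lock('oslo_vmware_api_lock'):
        pass  # critical section
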
[ 581.328013] env[68673]: DEBUG nova.context [None req-556c1bb6-eae8-4db5-9dde-db0df2fe52f9 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b4a43c08-2877-40be-8ab1-1bc1615cb625(cell1) {{(pid=68673) load_cells /opt/stack/nova/nova/context.py:464}}
[ 581.329917] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 581.330153] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 581.330796] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 581.331231] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Acquiring lock "b4a43c08-2877-40be-8ab1-1bc1615cb625" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 581.331422] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Lock "b4a43c08-2877-40be-8ab1-1bc1615cb625" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 581.332428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Lock "b4a43c08-2877-40be-8ab1-1bc1615cb625" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 581.352419] env[68673]: INFO dbcounter [None req-225c7628-4e91-4885-821a-52db083f716d None None] Registered counter for database nova_cell0
[ 581.360784] env[68673]: INFO dbcounter [None req-225c7628-4e91-4885-821a-52db083f716d None None] Registered counter for database nova_cell1
[ 581.363888] env[68673]: DEBUG oslo_db.sqlalchemy.engines [None req-225c7628-4e91-4885-821a-52db083f716d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68673) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 581.364481] env[68673]: DEBUG oslo_db.sqlalchemy.engines [None req-225c7628-4e91-4885-821a-52db083f716d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68673) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 581.368753] env[68673]: DEBUG dbcounter [-] [68673] Writer thread running {{(pid=68673) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 581.369860] env[68673]: DEBUG dbcounter [-] [68673] Writer thread running {{(pid=68673) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 581.371691] env[68673]: ERROR nova.db.main.api [None req-225c7628-4e91-4885-821a-52db083f716d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 581.371691] env[68673]: result = function(*args, **kwargs)
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 581.371691] env[68673]: return func(*args, **kwargs)
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 581.371691] env[68673]: result = fn(*args, **kwargs)
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 581.371691] env[68673]: return f(*args, **kwargs)
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 581.371691] env[68673]: return db.service_get_minimum_version(context, binaries)
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 581.371691] env[68673]: _check_db_access()
[ 581.371691] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 581.371691] env[68673]: stacktrace = ''.join(traceback.format_stack())
[ 581.371691] env[68673]:
[ 581.372717] env[68673]: ERROR nova.db.main.api [None req-225c7628-4e91-4885-821a-52db083f716d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 581.372717] env[68673]: result = function(*args, **kwargs)
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 581.372717] env[68673]: return func(*args, **kwargs)
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 581.372717] env[68673]: result = fn(*args, **kwargs)
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 581.372717] env[68673]: return f(*args, **kwargs)
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 581.372717] env[68673]: return db.service_get_minimum_version(context, binaries)
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 581.372717] env[68673]: _check_db_access()
[ 581.372717] env[68673]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 581.372717] env[68673]: stacktrace = ''.join(traceback.format_stack())
[ 581.372717] env[68673]:
[ 581.373128] env[68673]: WARNING nova.objects.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 581.373235] env[68673]: WARNING nova.objects.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] Failed to get minimum service version for cell b4a43c08-2877-40be-8ab1-1bc1615cb625
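
The paired ERROR tracebacks are expected at startup: nova-compute is forbidden from querying the main database directly, so the service-version lookup logs the offending stack and the callers downgrade the failure to the two WARNING lines. A loose, hypothetical sketch of that guard; the names approximate nova/db/main/api.py but are not its exact code:

    # Hedged sketch of the "No DB access allowed in nova-compute" guard.
    import logging
    import traceback

    LOG = logging.getLogger('nova.db.main.api')
    DISABLE_DB_ACCESS = True  # flipped on in nova-compute processes


    def _check_db_access():
        # Record who tried to touch the database so the stray call can be found.
        stacktrace = ''.join(traceback.format_stack())
        LOG.error('No DB access allowed in nova-compute: %s', stacktrace)


    def guard_db_call(f):
        def wrapper(*args, **kwargs):
            if DISABLE_DB_ACCESS:
                _check_db_access()
                # The caller catches this and logs the WARNING seen above.
                raise RuntimeError('No DB access allowed in nova-compute')
            return f(*args, **kwargs)
        return wrapper
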
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.373807] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Acquired lock "singleton_lock" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.374058] env[68673]: DEBUG oslo_concurrency.lockutils [None req-225c7628-4e91-4885-821a-52db083f716d None None] Releasing lock "singleton_lock" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.374376] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] Full set of CONF: {{(pid=68673) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 581.374518] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ******************************************************************************** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 581.374647] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] Configuration options gathered from: {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 581.374781] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 581.374970] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 581.375133] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ================================================================================ {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 581.375360] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] allow_resize_to_same_host = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.375535] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] arq_binding_timeout = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.375667] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] backdoor_port = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.375794] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] backdoor_socket = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.375957] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] block_device_allocate_retries = 60 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.376151] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] block_device_allocate_retries_interval = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.376336] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cert = self.pem {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.376505] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.376674] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute_monitors = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.376840] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] config_dir = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377022] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] config_drive_format = iso9660 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377152] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377321] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] config_source = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377513] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] console_host = devstack {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377691] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] control_exchange = nova {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.377852] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cpu_allocation_ratio = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378034] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] daemon = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378216] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] debug = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378379] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_access_ip_network_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378550] 
env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_availability_zone = nova {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378709] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_ephemeral_format = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.378871] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_green_pool_size = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379119] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379292] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] default_schedule_zone = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379455] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] disk_allocation_ratio = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379616] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] enable_new_services = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379795] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] enabled_apis = ['osapi_compute'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.379962] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] enabled_ssl_apis = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.380136] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] flat_injected = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.380300] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] force_config_drive = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.380458] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] force_raw_images = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.380626] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d 
None None] graceful_shutdown_timeout = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.380787] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] heal_instance_info_cache_interval = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381000] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] host = cpu-1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381189] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381356] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] initial_disk_allocation_ratio = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381517] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] initial_ram_allocation_ratio = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381726] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.381890] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_build_timeout = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382062] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_delete_interval = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382238] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_format = [instance: %(uuid)s] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382401] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_name_template = instance-%08x {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382561] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_usage_audit = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382727] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_usage_audit_period = month {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.382891] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383065] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383235] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] internal_service_availability_zone = internal {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383394] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] key = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383553] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] live_migration_retry_count = 30 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383717] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_config_append = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.383882] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384049] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_dir = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384210] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384336] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_options = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384497] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_rotate_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384663] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_rotate_interval_type = days {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384825] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] log_rotation_type = none {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.384951] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385094] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385287] env[68673]: DEBUG 
oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385458] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385585] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385746] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] long_rpc_timeout = 1800 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.385904] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_concurrent_builds = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386076] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_concurrent_live_migrations = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386261] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_concurrent_snapshots = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386423] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_local_block_devices = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386579] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_logfile_count = 30 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386736] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] max_logfile_size_mb = 200 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.386892] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] maximum_instance_delete_attempts = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387070] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metadata_listen = 0.0.0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387241] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metadata_listen_port = 8775 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387414] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metadata_workers = 2 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387597] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] migrate_max_retries = -1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387766] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] mkisofs_cmd = genisoimage {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.387971] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] my_block_storage_ip = 10.180.1.21 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388115] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] my_ip = 10.180.1.21 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388282] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] network_allocate_retries = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388458] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388629] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] osapi_compute_listen = 0.0.0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388789] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] osapi_compute_listen_port = 8774 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.388955] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] osapi_compute_unique_server_name_scope = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389134] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] osapi_compute_workers = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389301] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] password_length = 12 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389466] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] periodic_enable = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389631] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] periodic_fuzzy_delay = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389796] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] pointer_model = usbtablet {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.389964] env[68673]: 
DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] preallocate_images = none {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390139] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] publish_errors = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390272] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] pybasedir = /opt/stack/nova {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390431] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ram_allocation_ratio = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390591] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rate_limit_burst = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390757] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rate_limit_except_level = CRITICAL {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.390916] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rate_limit_interval = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391094] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reboot_timeout = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391259] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reclaim_instance_interval = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391418] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] record = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391587] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reimage_timeout_per_gb = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391754] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] report_interval = 120 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.391914] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rescue_timeout = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392084] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reserved_host_cpus = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392246] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reserved_host_disk_mb = 0 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392405] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reserved_host_memory_mb = 512 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392565] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] reserved_huge_pages = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392725] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] resize_confirm_window = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.392886] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] resize_fs_using_block_device = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393056] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] resume_guests_state_on_host_boot = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393229] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393394] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] rpc_response_timeout = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393555] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] run_external_periodic_tasks = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393722] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] running_deleted_instance_action = reap {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.393885] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] running_deleted_instance_poll_interval = 1800 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394054] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] running_deleted_instance_timeout = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394219] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler_instance_sync_interval = 120 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394390] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_down_time = 720 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394560] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] servicegroup_driver = db {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394722] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] shelved_offload_time = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.394882] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] shelved_poll_interval = 3600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.395062] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] shutdown_timeout = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.395262] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] source_is_ipv6 = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.395431] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ssl_only = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.395678] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.395849] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] sync_power_state_interval = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396017] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] sync_power_state_pool_size = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396223] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] syslog_log_facility = LOG_USER {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396384] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] tempdir = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396548] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] timeout_nbd = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396718] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] transport_url = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.396880] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] update_resources_interval = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397052] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_cow_images = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397217] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d 
None None] use_eventlog = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397380] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_journal = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397566] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_json = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397731] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_rootwrap_daemon = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.397890] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_stderr = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398063] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] use_syslog = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398225] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vcpu_pin_set = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398393] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plugging_is_fatal = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398560] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plugging_timeout = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398725] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] virt_mkfs = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.398886] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] volume_usage_poll_interval = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.399056] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] watch_log_file = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.399229] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] web = /usr/share/spice-html5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 581.399418] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_concurrency.disable_process_locking = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.399698] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.399879] 
env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400056] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400234] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400407] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400571] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400795] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.auth_strategy = keystone {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.400979] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.compute_link_prefix = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.401188] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.401382] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.dhcp_domain = novalocal {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.401568] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.enable_instance_password = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.401748] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.glance_link_prefix = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.401928] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.402128] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.402313] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
api.instance_list_per_project_cells = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.402492] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.list_records_by_skipping_down_cells = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.402670] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.local_metadata_per_cell = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.402853] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.max_limit = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403048] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.metadata_cache_expiration = 15 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403242] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.neutron_default_tenant_id = default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403425] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.use_neutron_default_nets = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403615] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403789] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.403960] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.404151] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.404327] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_dynamic_targets = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.404497] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_jsonfile_path = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.404681] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.404873] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d 
[ 581.404873] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.backend = dogpile.cache.memcached {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405053] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.backend_argument = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405257] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.config_prefix = cache.oslo {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405435] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.dead_timeout = 60.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405603] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.debug_cache_backend = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405767] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.enable_retry_client = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.405930] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.enable_socket_keepalive = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406134] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.enabled = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406317] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.enforce_fips_mode = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406486] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.expiration_time = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406677] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.hashclient_retry_attempts = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406820] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.hashclient_retry_delay = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.406983] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_dead_retry = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.407158] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_password = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.407323] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.407506] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.407680] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_pool_maxsize = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.407844] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408014] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_sasl_enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408200] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408371] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_socket_timeout = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408533] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.memcache_username = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408700] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.proxies = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.408858] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_password = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409035] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_sentinel_service_name = mymaster {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409216] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409385] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_server = localhost:6379 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409558] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_socket_timeout = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409712] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.redis_username = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.409874] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.retry_attempts = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410049] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.retry_delay = 0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410245] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.socket_keepalive_count = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410572] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.socket_keepalive_idle = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410572] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.socket_keepalive_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410740] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.tls_allowed_ciphers = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.410899] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.tls_cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.411070] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.tls_certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.411238] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.tls_enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.411398] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cache.tls_keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
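The [cache] block above (dogpile.cache.memcached against localhost:11211, expiration_time = 600) is the option set consumed by oslo.cache. A minimal sketch, assuming oslo.cache and python-memcached are installed and a memcached is actually listening on localhost:11211, of turning those logged values into a usable region; this mirrors the oslo_cache.core API, not Nova's exact wiring:

from oslo_cache import core as cache
from oslo_config import cfg

CONF = cfg.CONF
cache.configure(CONF)                       # registers the [cache] options
CONF(args=[], project='nova')
# Mirror the values dumped above (normally they come from nova.conf).
CONF.set_override('enabled', True, group='cache')
CONF.set_override('backend', 'dogpile.cache.memcached', group='cache')
CONF.set_override('memcache_servers', ['localhost:11211'], group='cache')

region = cache.create_region()
cache.configure_cache_region(CONF, region)  # applies backend, expiration_time, ...
# Round-trip through memcached; raises if no server is listening.
region.set('greeting', 'hello')
assert region.get('greeting') == 'hello'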
[ 581.411566] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.411738] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.auth_type = password {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.411898] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412090] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.catalog_info = volumev3::publicURL {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412259] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412449] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412588] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.cross_az_attach = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412753] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.debug = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.412915] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.endpoint_template = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413094] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.http_retries = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413267] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413430] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413638] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.os_region_name = RegionOne {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413819] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.413983] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cinder.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414172] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414340] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.cpu_dedicated_set = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414502] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.cpu_shared_set = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414670] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.image_type_exclude_list = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414834] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.414999] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.max_concurrent_disk_ops = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.415199] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.max_disk_devices_to_attach = -1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.415376] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.415549] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.415717] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.resource_provider_association_refresh = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.415882] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.shutdown_retry_interval = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416083] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416296] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] conductor.workers = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416480] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] console.allowed_origins = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416644] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] console.ssl_ciphers = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416818] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] console.ssl_minimum_version = default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.416988] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] consoleauth.enforce_session_timeout = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.417176] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] consoleauth.token_ttl = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.417348] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.417531] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.417708] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.417874] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418045] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418212] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418382] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418543] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418705] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.418865] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419035] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419201] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419363] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419533] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.service_type = accelerator {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419719] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.419854] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420017] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420181] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420368] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420532] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] cyborg.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420714] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.backend = sqlalchemy {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.420885] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.connection = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421064] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.connection_debug = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421239] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.connection_parameters = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421408] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.connection_recycle_time = 3600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421573] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.connection_trace = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421736] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.db_inc_retry_interval = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.421901] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.db_max_retries = 20 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
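database.connection just above, api_database.connection below, and the assorted passwords all print as **** because oslo.config masks options registered with secret=True when it dumps values. A small sketch demonstrating the masking; the connection string is hypothetical and the registration is a stand-in, not Nova's:

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

CONF = cfg.CONF
# secret=True is what triggers the '****' in log_opt_values output.
CONF.register_opts(
    [cfg.StrOpt('connection', secret=True,
                default='mysql+pymysql://nova:hunter2@db/nova')],  # hypothetical
    group='database')

logging.basicConfig(level=logging.DEBUG)
CONF(args=[], project='nova')
CONF.log_opt_values(LOG, logging.DEBUG)   # logs: database.connection = ****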
[ 581.422076] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.db_max_retry_interval = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.422244] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.db_retry_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.422411] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.max_overflow = 50 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.422574] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.max_pool_size = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.422734] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.max_retries = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.422937] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.423236] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.mysql_wsrep_sync_wait = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.423550] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.pool_timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.423842] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.retry_interval = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.424156] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.slave_connection = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.424443] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.sqlite_synchronous = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.424650] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] database.use_db_reconnect = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.424843] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.backend = sqlalchemy {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425031] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.connection = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425601] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.connection_debug = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425601] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.connection_parameters = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425601] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.connection_recycle_time = 3600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425757] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.connection_trace = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.425926] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.db_inc_retry_interval = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426106] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.db_max_retries = 20 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426289] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.db_max_retry_interval = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426458] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.db_retry_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426623] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.max_overflow = 50 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426787] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.max_pool_size = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.426947] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.max_retries = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427134] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427301] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427480] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.pool_timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427656] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.retry_interval = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427818] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.slave_connection = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.427980] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] api_database.sqlite_synchronous = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.428174] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] devices.enabled_mdev_types = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.428356] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.428526] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ephemeral_storage_encryption.default_format = luks {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.428689] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ephemeral_storage_encryption.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.428852] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429032] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.api_servers = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429202] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429367] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429531] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429689] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.429849] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430022] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.debug = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430184] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.default_trusted_certificate_ids = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430350] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.enable_certificate_validation = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430509] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.enable_rbd_download = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430668] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430832] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.430993] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431167] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431330] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431493] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.num_retries = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431667] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.rbd_ceph_conf = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431827] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.rbd_connect_timeout = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.431994] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.rbd_pool = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432175] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.rbd_user = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432338] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432496] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432655] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432824] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.service_type = image {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.432986] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433158] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433317] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433471] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433648] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433810] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.verify_glance_signatures = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.433967] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] glance.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.434148] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] guestfs.debug = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.434316] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] mks.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.434680] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.434872] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.manager_interval = 2400 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435054] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.precache_concurrency = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435263] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.remove_unused_base_images = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435444] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435617] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435796] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] image_cache.subdirectory_name = _base {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.435974] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.api_max_retries = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.436159] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.api_retry_interval = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.436425] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.436636] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.auth_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.436807] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.436969] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.437151] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.437321] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.conductor_group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.437505] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.437677] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.437839] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438016] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438179] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438341] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438514] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438664] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.peer_list = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438822] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.438983] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439163] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.serial_console_state_timeout = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439327] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439498] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.service_type = baremetal {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439659] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.shard = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439825] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.439988] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.440161] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.440325] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.440506] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.440666] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ironic.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.440848] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441031] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] key_manager.fixed_key = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441219] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441385] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.barbican_api_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441544] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.barbican_endpoint = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441713] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.barbican_endpoint_type = public {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.441872] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.barbican_region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.442042] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.442205] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.442369] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
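The per-service groups in this dump (cinder.*, cyborg.*, glance.*, ironic.*, barbican.*, and keystone.* below) repeat the same keystoneauth1 session and adapter options: cafile/certfile/insecure/timeout/split_loggers on the session side, service_type/valid_interfaces/region_name/endpoint_override on the adapter side. A sketch, using the glance group as the example and keystoneauth1's standard conf loaders rather than Nova's exact wiring, of how such a group becomes an HTTP client:

from keystoneauth1 import loading as ks_loading
from oslo_config import cfg

CONF = cfg.CONF
# Register the standard option sets that the dump above is echoing.
ks_loading.register_session_conf_options(CONF, 'glance')   # cafile, insecure, timeout, ...
ks_loading.register_adapter_conf_options(CONF, 'glance')   # service_type, valid_interfaces, ...
CONF(args=[], project='nova')

# With the values logged above this yields a TLS-verifying session and an
# 'image' service adapter resolved via the internal/public endpoint interfaces.
# Authenticated calls would additionally need an auth plugin on the session.
session = ks_loading.load_session_from_conf_options(CONF, 'glance')
adapter = ks_loading.load_adapter_from_conf_options(CONF, 'glance', session=session)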
[ 581.442528] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.442685] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.442847] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.number_of_retries = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443014] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.retry_delay = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443188] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.send_service_user_token = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443349] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443503] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443663] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.verify_ssl = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443818] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican.verify_ssl_path = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.443982] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444159] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.auth_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444319] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444476] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444636] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444793] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.444951] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445143] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445316] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] barbican_service_user.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445486] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.approle_role_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445645] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.approle_secret_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445800] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.445955] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446132] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446296] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446452] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446622] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.kv_mountpoint = secret {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446779] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.kv_path = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.446941] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.kv_version = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447111] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.namespace = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447273] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.root_token_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447454] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447628] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.ssl_ca_crt_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447785] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.447946] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.use_ssl = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448129] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448303] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448465] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.auth_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448624] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448782] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.448943] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449114] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449277] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449435] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449598] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449755] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.449913] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450082] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450244] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450400] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450556] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450722] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.service_type = identity {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.450883] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.451058] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.451221] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.451382] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.451560] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.451726] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] keystone.version = None {{(pid=68673) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.451921] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.connection_uri = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452100] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_mode = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452273] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_model_extra_flags = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452449] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_models = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452622] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_power_governor_high = performance {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452791] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_power_governor_low = powersave {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.452956] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_power_management = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453143] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453313] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.device_detach_attempts = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453476] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.device_detach_timeout = 20 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453644] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.disk_cachemodes = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453804] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.disk_prefix = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.453971] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.enabled_perf_events = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454146] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.file_backed_memory = 0 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454314] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.gid_maps = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454473] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.hw_disk_discard = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454630] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.hw_machine_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454806] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_rbd_ceph_conf = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.454978] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.455180] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.455367] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_rbd_glance_store_name = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.455540] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_rbd_pool = rbd {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.455714] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_type = default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.455874] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.images_volume_group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456048] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.inject_key = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456218] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.inject_partition = -2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456383] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.inject_password = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456545] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.iscsi_iface = None {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456708] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.iser_use_multipath = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.456870] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_bandwidth = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457043] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457211] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_downtime = 500 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457378] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457569] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457736] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_inbound_addr = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.457899] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458087] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_permit_post_copy = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458258] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_scheme = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458434] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_timeout_action = abort {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458601] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_tunnelled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458762] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.live_migration_uri = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.458926] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
libvirt.live_migration_with_native_tls = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.459100] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.max_queues = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.459269] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.459508] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.459674] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.nfs_mount_options = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.459969] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.460161] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.460336] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_iser_scan_tries = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.460501] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_memory_encrypted_guests = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.460668] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.460833] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_pcie_ports = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461008] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.num_volume_scan_tries = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461184] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.pmem_namespaces = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461347] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.quobyte_client_cfg = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461632] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461806] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rbd_connect_timeout = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.461974] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462157] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462324] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rbd_secret_uuid = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462483] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rbd_user = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462647] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462819] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.remote_filesystem_transport = ssh {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.462988] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rescue_image_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.463190] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rescue_kernel_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.463355] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rescue_ramdisk_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.463527] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.463687] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.rx_queue_size = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.463855] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.smbfs_mount_options = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.464144] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.464322] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.snapshot_compression = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.464493] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.snapshot_image_format = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.464717] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.464889] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.sparse_logical_volumes = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465069] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.swtpm_enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465281] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.swtpm_group = tss {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465461] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.swtpm_user = tss {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465636] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.sysinfo_serial = unique {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465799] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.tb_cache_size = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.465959] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.tx_queue_size = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.466143] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.uid_maps = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.466312] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.use_virtio_for_bridges = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.466483] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.virt_type = kvm {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.466654] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.volume_clear = zero {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
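
Every record in this dump carries the same "log_opt_values .../oslo_config/cfg.py:2620" tag: at startup the service walks its registered option groups and logs each effective value as one DEBUG record. A minimal sketch of that mechanism, assuming a toy [libvirt] subset rather than Nova's real registrations:

```python
# Minimal sketch (toy option set, not Nova's real schema) of the mechanism
# behind this dump: each "group.option = value" DEBUG record is emitted by
# ConfigOpts.log_opt_values(), the call tagged on every record above.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

conf = cfg.ConfigOpts()
# Defaults copied from the effective values in the surrounding records.
conf.register_opts(
    [
        cfg.StrOpt('virt_type', default='kvm'),
        cfg.StrOpt('volume_clear', default='zero'),
        cfg.BoolOpt('use_virtio_for_bridges', default=True),
    ],
    group='libvirt',
)

conf(args=[])  # parse an empty command line; defaults and config files apply
conf.log_opt_values(LOG, logging.DEBUG)  # emits "libvirt.virt_type = kvm", ...
```
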
[ 581.466820] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.volume_clear_size = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.466988] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.volume_use_multipath = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.467165] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_cache_path = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.467339] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.467510] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_mount_group = qemu {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.467677] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_mount_opts = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.467847] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.468136] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.468321] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.vzstorage_mount_user = stack {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.468488] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.468662] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.468837] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.auth_type = password {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469008] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469181] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.certfile = None
{{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469352] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469511] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469670] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.469841] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.default_floating_pool = public {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470041] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470205] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.extension_sync_interval = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470339] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.http_retries = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470505] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470667] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470827] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.470998] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.471172] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.471345] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.ovs_bridge = br-int {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.471514] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.physnets = [] {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.471684] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.region_name = RegionOne {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.471846] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472026] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.service_metadata_proxy = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472194] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472366] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.service_type = network {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472529] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472685] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.472843] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473024] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473224] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473393] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] neutron.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473568] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] notifications.bdms_in_notifications = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473748] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] notifications.default_level = INFO {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.473926] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] notifications.notification_format = unversioned {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474106] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] notifications.notify_on_state_change = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474290] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474467] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] pci.alias = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474640] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] pci.device_spec = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474807] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] pci.report_in_placement = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.474980] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.475193] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.auth_type = password {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.475377] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.475540] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.475701] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.475865] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476034] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476221] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476391] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.default_domain_id = None {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476550] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.default_domain_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476707] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.domain_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.476865] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.domain_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477035] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477202] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477364] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477522] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477679] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.477846] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.password = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.478021] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.project_domain_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.478189] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.project_domain_name = Default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.478358] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.project_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.478527] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.project_name = service {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.478695] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.region_name = RegionOne {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
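
Note that placement.password above is rendered as **** (as was neutron.metadata_proxy_shared_secret earlier): values of options declared secret are masked before logging. A minimal sketch, assuming a toy two-option [placement] group:

```python
# Minimal sketch of why "placement.password" is rendered as ****:
# log_opt_values() masks any option declared with secret=True. The two option
# names mirror the [placement] records; the rest is an assumed toy setup.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

conf = cfg.ConfigOpts()
conf.register_opts(
    [
        cfg.StrOpt('username', default='placement'),
        cfg.StrOpt('password', secret=True),  # never written to the log
    ],
    group='placement',
)

conf(args=[])
conf.log_opt_values(LOG, logging.DEBUG)  # logs "placement.password = ****"
```
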
[ 581.478855] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479024] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479199] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.service_type = placement {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479367] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479525] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479687] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.479848] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.system_scope = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480013] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480183] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.trust_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480346] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.user_domain_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480515] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.user_domain_name = Default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480673] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.user_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.480842] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.username = placement {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481030] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481197] env[68673]: DEBUG oslo_service.service [None
req-225c7628-4e91-4885-821a-52db083f716d None None] placement.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481379] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.cores = 20 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481547] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.count_usage_from_placement = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481720] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.481895] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.injected_file_content_bytes = 10240 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482075] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.injected_file_path_length = 255 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482247] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.injected_files = 5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482416] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.instances = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482583] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.key_pairs = 100 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482751] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.metadata_items = 128 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.482918] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.ram = 51200 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483121] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.recheck_quota = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483304] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.server_group_members = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483471] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] quota.server_groups = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483645] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483810] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.483974] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.image_metadata_prefilter = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484152] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484324] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.max_attempts = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484486] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.max_placement_results = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484650] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484811] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.query_placement_for_image_type_support = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.484971] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.485194] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] scheduler.workers = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.485387] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.485565] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
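
Several of the [filter_scheduler] records that follow hold list-valued options (available_filters, enabled_filters, isolated_hosts, isolated_images). A minimal sketch of how such lists are registered and logged, with defaults copied from the effective values below; the real registrations (types, help text) live in Nova's option modules:

```python
# Minimal sketch of list-valued options like the [filter_scheduler] records
# that follow; defaults copied from the effective values logged below.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

conf = cfg.ConfigOpts()
conf.register_opts(
    [
        cfg.ListOpt('available_filters',
                    default=['nova.scheduler.filters.all_filters']),
        cfg.ListOpt('enabled_filters',
                    default=['ComputeFilter', 'ComputeCapabilitiesFilter',
                             'ImagePropertiesFilter',
                             'ServerGroupAntiAffinityFilter',
                             'ServerGroupAffinityFilter', 'SameHostFilter',
                             'DifferentHostFilter']),
    ],
    group='filter_scheduler',
)

conf(args=[])
conf.log_opt_values(LOG, logging.DEBUG)
# In a config file these are comma-separated lists, e.g.:
#   [filter_scheduler]
#   enabled_filters = ComputeFilter,ImagePropertiesFilter
```
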
[ 581.485746] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.485918] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.486123] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.486308] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.486477] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.486669] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.486838] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.host_subset_size = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487010] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487181] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487351] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487518] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.isolated_hosts = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487682] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.isolated_images = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.487845] env[68673]: DEBUG oslo_service.service [None
req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488013] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488196] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488363] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.pci_in_placement = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488529] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488690] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.488853] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489022] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489191] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489355] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489517] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.track_instance_changes = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489692] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.489862] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metrics.required = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.490034] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metrics.weight_multiplier = 1.0 
{{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.490204] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.490391] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] metrics.weight_setting = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.490697] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.490873] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491064] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.port_range = 10000:20000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491243] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491416] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491586] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] serial_console.serialproxy_port = 6083 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491756] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.491931] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.auth_type = password {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.492104] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.492270] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.492452] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.492687] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.insecure = False {{(pid=68673) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.492863] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.493050] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.send_service_user_token = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.493225] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.493389] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] service_user.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.493562] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.agent_enabled = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.493740] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494072] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494273] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494448] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.html5proxy_port = 6082 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494613] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.image_compression = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494773] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.jpeg_compression = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.494931] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.playback_compression = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.495145] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.server_listen = 127.0.0.1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.495329] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.495493] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.streaming_mode = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.495718] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] spice.zlib_compression = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.495922] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] upgrade_levels.baseapi = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496114] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] upgrade_levels.compute = auto {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496283] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] upgrade_levels.conductor = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496442] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] upgrade_levels.scheduler = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496607] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496771] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.auth_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.496931] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497102] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497271] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497440] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497594] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.keyfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497756] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.497914] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vendordata_dynamic_auth.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498100] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.api_retry_count = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498270] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.ca_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498440] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.cache_prefix = devstack-image-cache {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498606] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.cluster_name = testcl1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498771] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.connection_pool_size = 10 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.498928] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.console_delay_seconds = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.499108] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.datastore_regex = ^datastore.* {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.499324] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.499497] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.host_password = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.499666] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.host_port = 443 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.499837] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.host_username = administrator@vsphere.local {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500014] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.insecure = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500190] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.integration_bridge = None {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500359] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.maximum_objects = 100 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500520] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.pbm_default_policy = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500683] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.pbm_enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.500840] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.pbm_wsdl_location = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501013] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501179] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.serial_port_proxy_uri = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501337] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.serial_port_service_uri = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501506] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.task_poll_interval = 0.5 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501678] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.use_linked_clone = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.501846] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.vnc_keymap = en-us {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.502022] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.vnc_port = 5900 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.502194] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vmware.vnc_port_total = 10000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.502384] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.auth_schemes = ['none'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.502557] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.502871] env[68673]: 
DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503098] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503301] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.novncproxy_port = 6080 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503483] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.server_listen = 127.0.0.1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503656] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503820] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.vencrypt_ca_certs = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.503978] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.vencrypt_client_cert = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504155] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vnc.vencrypt_client_key = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504341] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504507] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_deep_image_inspection = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504672] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504835] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.504998] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.505204] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.disable_rootwrap = False {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.505379] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.enable_numa_live_migration = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.505543] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.505706] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.505869] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506046] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.libvirt_disable_apic = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506231] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506402] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506564] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506724] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.506882] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.507070] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.507244] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.507410] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
581.507570] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.507734] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.507921] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508116] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.client_socket_timeout = 900 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508295] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.default_pool_size = 1000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508460] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.keep_alive = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508628] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.max_header_line = 16384 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508814] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.secure_proxy_ssl_header = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.508984] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.ssl_ca_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.509163] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.ssl_cert_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.509329] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.ssl_key_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.509496] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.tcp_keepidle = 600 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.509671] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.509842] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] zvm.ca_file = None {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.510009] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] zvm.cloud_connector_url = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.510321] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.510500] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] zvm.reachable_timeout = 300 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.510683] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.enforce_new_defaults = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.510884] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.enforce_scope = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511083] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.policy_default_rule = default {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511277] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511452] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.policy_file = policy.yaml {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511627] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511792] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.511954] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.512129] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.512300] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.512469] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.512645] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.512820] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.connection_string = messaging:// {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513008] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.enabled = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513227] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.es_doc_type = notification {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513405] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.es_scroll_size = 10000 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513579] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.es_scroll_time = 2m {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513746] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.filter_error_trace = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.513917] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.hmac_keys = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514100] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.sentinel_service_name = mymaster {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514274] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.socket_timeout = 0.1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514440] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.trace_requests = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514602] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler.trace_sqlalchemy = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514779] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler_jaeger.process_tags = {} {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.514939] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] 
profiler_jaeger.service_name_prefix = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515144] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] profiler_otlp.service_name_prefix = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515328] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] remote_debug.host = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515489] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] remote_debug.port = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515670] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515833] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.515996] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516176] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516342] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516505] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516664] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516826] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.516988] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.517177] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.517344] env[68673]: 
DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.517516] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.517684] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.517855] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518036] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518209] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518375] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518551] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518714] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.518877] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519051] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519220] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519385] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519554] env[68673]: DEBUG oslo_service.service [None 
req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519717] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.519878] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520052] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520219] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520387] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520551] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520724] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.520894] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521068] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521244] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521418] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.ssl_version = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521581] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521767] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.521936] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_notifications.retry = -1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522134] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522315] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_messaging_notifications.transport_url = **** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522489] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.auth_section = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522654] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.auth_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522815] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.cafile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.522979] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.certfile = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.523199] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.collect_timing = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.523371] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.connect_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.523531] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.connect_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.523692] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.endpoint_id = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.523851] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.endpoint_override = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524025] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.insecure = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524192] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.keyfile = None {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524354] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.max_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524512] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.min_version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524669] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.region_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524828] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.retriable_status_codes = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.524987] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.service_name = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525193] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.service_type = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525367] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.split_loggers = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525528] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.status_code_retries = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525686] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.status_code_retry_delay = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525843] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.timeout = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.525998] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.valid_interfaces = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.526192] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_limit.version = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.526347] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_reports.file_event_handler = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 581.526511] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68673) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.526669] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] oslo_reports.log_dir = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.526836] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.526995] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527190] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527326] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527490] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527647] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527814] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.527972] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528150] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528323] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528484] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528641] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] vif_plug_ovs_privileged.user = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528807] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.flat_interface = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.528982] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.529174] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.529347] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.529518] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.529683] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.529846] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530008] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530194] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530369] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.isolate_vif = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530539] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530703] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.530871] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531049] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.ovsdb_interface = native {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531214] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_vif_ovs.per_port_bridge = False {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531381] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_brick.lock_path = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531542] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531699] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.531866] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.capabilities = [21] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532032] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532195] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.helper_command = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532362] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532524] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532679] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] privsep_osbrick.user = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.532849] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533031] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.group = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533230] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.helper_command = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533406] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533570] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533729] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] nova_sys_admin.user = None {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 581.533857] env[68673]: DEBUG oslo_service.service [None req-225c7628-4e91-4885-821a-52db083f716d None None] ******************************************************************************** {{(pid=68673) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}}
[ 581.534561] env[68673]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 581.544201] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Getting list of instances from cluster (obj){
[ 581.544201] env[68673]: value = "domain-c8"
[ 581.544201] env[68673]: _type = "ClusterComputeResource"
[ 581.544201] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 581.545457] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e09694-f121-4848-ae02-02761a3e3612 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.554605] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Got total of 0 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 581.555163] env[68673]: WARNING nova.virt.vmwareapi.driver [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 581.555625] env[68673]: INFO nova.virt.node [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Generated node identity fd6b1504-0fb7-49fe-8051-ab853a390b4e
[ 581.555836] env[68673]: INFO nova.virt.node [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Wrote node identity fd6b1504-0fb7-49fe-8051-ab853a390b4e to /opt/stack/data/n-cpu-1/compute_id
[ 581.567510] env[68673]: WARNING nova.compute.manager [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Compute nodes ['fd6b1504-0fb7-49fe-8051-ab853a390b4e'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 581.600881] env[68673]: INFO nova.compute.manager [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 581.624226] env[68673]: WARNING nova.compute.manager [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
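Editor's note: the nova.virt.node lines above record the service generating a stable node identity on first start and persisting it to the compute_id file; later starts read the same UUID back, which is why the missing-record warnings that follow are expected only on a fresh host. A minimal sketch of that first-start behaviour (a simplified stand-in, not Nova's actual nova/virt/node.py code):

    import uuid
    from pathlib import Path

    def get_or_create_node_identity(state_dir: str) -> uuid.UUID:
        """Return the persisted node UUID, creating it on first start."""
        compute_id = Path(state_dir) / 'compute_id'
        if compute_id.exists():
            # Subsequent starts: reuse the recorded identity.
            return uuid.UUID(compute_id.read_text().strip())
        node_uuid = uuid.uuid4()                 # "Generated node identity ..."
        compute_id.write_text(f'{node_uuid}\n')  # "Wrote node identity ... to .../compute_id"
        return node_uuid

    print(get_or_create_node_identity('/tmp'))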
[ 581.624667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 581.624899] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 581.625119] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 581.625255] env[68673]: DEBUG nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 581.626321] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0addc97-0b39-4d7e-8598-4052ea198c21 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.634924] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7420960-3577-4479-8c79-fd1f1ed334d3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.648983] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a252080-f1dc-4896-9b08-1f8d8e273cbe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.655243] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcfed18-590c-42fd-acf4-9b108c6a59ad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.685692] env[68673]: DEBUG nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180931MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 581.685842] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 581.686039] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 581.698183] env[68673]: WARNING nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] No compute node record for cpu-1:fd6b1504-0fb7-49fe-8051-ab853a390b4e: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host fd6b1504-0fb7-49fe-8051-ab853a390b4e could not be found.
[ 581.710980] env[68673]: INFO nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: fd6b1504-0fb7-49fe-8051-ab853a390b4e
[ 581.767140] env[68673]: DEBUG nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 581.767309] env[68673]: DEBUG nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 581.869628] env[68673]: INFO nova.scheduler.client.report [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] [req-d3b2b132-e85c-412f-9053-b20e4e5a1373] Created resource provider record via placement API for resource provider with UUID fd6b1504-0fb7-49fe-8051-ab853a390b4e and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 581.886449] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc24928-b337-48ba-af95-f1ff573128d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.893881] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fe65bd-7ea4-41e8-a8dc-566cb3c81aa8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.923353] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151ba03b-8421-49e3-874b-1d276ab714b3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.930494] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0d946-bcc5-4558-985c-6fbb49c9ceab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.944636] env[68673]: DEBUG nova.compute.provider_tree [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 581.985341] env[68673]: DEBUG nova.scheduler.client.report [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Updated inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}}
[ 581.985656] env[68673]: DEBUG nova.compute.provider_tree [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Updating resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e generation from 0 to 1 during operation: update_inventory {{(pid=68673) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 581.985847] env[68673]: DEBUG nova.compute.provider_tree [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 582.033629] env[68673]: DEBUG nova.compute.provider_tree [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Updating resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e generation from 1 to 2 during operation: update_traits {{(pid=68673) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 582.050899] env[68673]: DEBUG nova.compute.resource_tracker [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 582.051101] env[68673]: DEBUG oslo_concurrency.lockutils [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.365s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 582.051266] env[68673]: DEBUG nova.service [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Creating RPC server for service compute {{(pid=68673) start /opt/stack/nova/nova/service.py:182}}
[ 582.064050] env[68673]: DEBUG nova.service [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] Join ServiceGroup membership for this service compute {{(pid=68673) start /opt/stack/nova/nova/service.py:199}}
[ 582.064306] env[68673]: DEBUG nova.servicegroup.drivers.db [None req-86b81e1b-979f-492b-a5b6-0c1b37788878 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68673) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 591.370931] env[68673]: DEBUG dbcounter [-] [68673] Writing DB stats nova_cell0:SELECT=1 {{(pid=68673) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 591.372369] env[68673]: DEBUG dbcounter [-] [68673] Writing DB stats nova_cell1:SELECT=1 {{(pid=68673) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 612.065821] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
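Editor's note: the inventory payload pushed to Placement above has a fixed per-resource-class shape, and the schedulable ceiling Placement derives from it is (total - reserved) * allocation_ratio; that is why 48 physical vCPUs with a 4.0 ratio can back 192 VCPU allocations. The dict below is copied from the log; the helper is illustrative, not Nova code:

    # Inventory payload as logged above.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv: dict) -> float:
        # Placement treats (total - reserved) * allocation_ratio as the
        # schedulable ceiling for a resource class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in INVENTORY.items():
        print(rc, effective_capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0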
[ 612.081026] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){
[ 612.081026] env[68673]: value = "domain-c8"
[ 612.081026] env[68673]: _type = "ClusterComputeResource"
[ 612.081026] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 612.081026] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b661469-359a-4bf0-a799-93994969be2b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.092361] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 0 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 612.092605] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 612.092937] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){
[ 612.092937] env[68673]: value = "domain-c8"
[ 612.092937] env[68673]: _type = "ClusterComputeResource"
[ 612.092937] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 612.093890] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e65df7-1981-46ce-a649-bfdde63d43f9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.102586] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 0 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 620.259044] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "f74fc310-4045-448f-93f6-96196d5f38b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 620.259398] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "f74fc310-4045-448f-93f6-96196d5f38b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 620.282159] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 620.415504] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 620.416149] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 620.417954] env[68673]: INFO nova.compute.claims [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 620.586697] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df8e3d3-04c9-44fe-bd0c-cf225c25771f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.596325] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3257e48b-956f-4149-b01e-bed4744ac8e0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.644981] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb7a6a6-d678-4de4-91ac-0ce6d1159adc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.653437] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c664ab5-18a1-46b4-be52-13c6d5c2069e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.669786] env[68673]: DEBUG nova.compute.provider_tree [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 620.684532] env[68673]: DEBUG nova.scheduler.client.report [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 620.711886] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 620.713531] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 620.761818] env[68673]: DEBUG nova.compute.utils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 620.764316] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 620.764719] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 620.780639] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 620.865737] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
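Editor's note: every "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... "released" ... held Ns" triplet above is emitted by oslo.concurrency's lock helpers, which wrap the critical section and time how long the lock was waited on and held. A minimal reproduction (the lock name mirrors the log; the function body is a placeholder):

    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Critical section: test and record the claim while serialized.
        pass

    # Emits the same acquire/release DEBUG triplet seen throughout this log.
    instance_claim()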
[ 622.900694] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 622.900694] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 622.901045] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 622.901045] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 622.902151] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 622.903686] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 622.903686] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 622.903686] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 622.903686] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 622.903686] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 622.903876] env[68673]: DEBUG nova.virt.hardware [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 622.904868] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2538335e-a25e-4f3d-af84-95e9a73c3071 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.915243] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58caeb5-1786-4393-aedf-9f858c464c5f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.940321] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768b9932-4bfe-4852-bf24-853caa0ea2a6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.969733] env[68673]: DEBUG nova.policy [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d516063b3b748479a1553a00a4c8ffd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34808c6750ed4b52ba1c78bdcb7d1163', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 623.835535] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 623.835535] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
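Editor's note: the nova.virt.hardware lines above enumerate every (sockets, cores, threads) combination whose product equals the requested vCPU count, bounded by the 65536 limits, then sort the survivors by preference; with the 1-vCPU m1.nano flavor only 1:1:1 qualifies, hence "Got 1 possible topologies". A simplified sketch of that search (illustrative only, not Nova's actual hardware.py implementation):

    from itertools import product

    def possible_topologies(vcpus: int, max_sockets: int = 65536,
                            max_cores: int = 65536, max_threads: int = 65536):
        """Yield (sockets, cores, threads) triples covering exactly `vcpus`."""
        def bound(limit):
            return range(1, min(vcpus, limit) + 1)
        for s, c, t in product(bound(max_sockets), bound(max_cores),
                               bound(max_threads)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"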
[ 623.867456] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Successfully created port: 78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 623.872531] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 623.952667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 623.953041] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 623.956996] env[68673]: INFO nova.compute.claims [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 624.107097] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b10124-6318-4b62-878e-fcb117a04bbf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.117348] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf66692a-85c5-45fc-9b64-5424a96e163c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.160707] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d6eeed-e031-40c9-b240-ddc1172d541a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.170501] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a10963-9699-4c3d-bf23-68074b15284d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.192512] env[68673]: DEBUG nova.compute.provider_tree [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 624.205166] env[68673]: DEBUG nova.scheduler.client.report [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 624.224485] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 624.225078] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 624.291782] env[68673]: DEBUG nova.compute.utils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 624.294999] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 624.295415] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 624.312305] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 624.404643] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
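Editor's note: "Allocating IP information in the background" means Neutron port creation proceeds concurrently with block-device preparation, and the result is joined before the spawn needs its ports. Nova does this with eventlet green threads; the shape of the pattern can be sketched with a thread pool (all names below are hypothetical stand-ins, not Nova code):

    from concurrent.futures import ThreadPoolExecutor

    def allocate_for_instance(instance_uuid: str) -> list[dict]:
        # Stand-in for the Neutron port creation that later logs
        # "Successfully created port: ..." above.
        return [{'port_id': 'example-port', 'instance': instance_uuid}]

    with ThreadPoolExecutor(max_workers=1) as pool:
        future = pool.submit(allocate_for_instance,
                             'f74fc310-4045-448f-93f6-96196d5f38b2')
        # ... meanwhile, build block device mappings ...
        network_info = future.result()  # join before spawning on the hypervisor
        print(network_info)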
[ 624.442171] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 624.445439] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 624.445439] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 624.445439] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 624.445439] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 624.445439] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 624.446638] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 624.446638] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 624.446638] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 624.446638] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 624.446638] env[68673]: DEBUG nova.virt.hardware [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 624.446784] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebcbb5f-4996-4212-bafb-4d1057fb5127 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.458104] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0439cde-4718-496a-b232-7be1399836d1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.673831] env[68673]: DEBUG nova.policy [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10987b1d09db49219525f9846638c55e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaf47d7aa4b64fa58f8c1b3f1fcc02df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 625.521728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "bfd5ac40-9e14-473a-8f14-895534a4642e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 625.522959] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 625.539531] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
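Editor's note: the "Policy check for network:attach_external_network failed" lines are non-fatal probes: Nova asks oslo.policy whether the requesting project member may attach external networks, logs the refusal at DEBUG, and continues the build. A sketch of a check with the same shape (the rule default below is illustrative; Nova loads its real defaults and policy files into its own Enforcer):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '34808c6750ed4b52ba1c78bdcb7d1163'}
    # do_raise=False mirrors the log's DEBUG-and-continue behaviour.
    allowed = enforcer.enforce('network:attach_external_network',
                               {}, creds, do_raise=False)
    print(allowed)  # False for a plain project member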
[ 625.549148] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 625.549568] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 625.565943] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 625.648530] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 625.648983] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 625.650543] env[68673]: INFO nova.compute.claims [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 625.659294] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 625.802412] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22bddfca-1cf6-4b52-8ee2-bc242973c0fe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.815593] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6154226a-b083-44f5-a5df-32993abc7470 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.854279] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54e05f2-d126-43c2-9ab9-0921bcd98dd2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.865294] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b020e2d4-26d3-410e-bb9c-5b6bb906d7d2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.881740] env[68673]: DEBUG nova.compute.provider_tree [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 625.896345] env[68673]: DEBUG nova.scheduler.client.report [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 625.922490] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.274s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 625.923029] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
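Editor's note: the contention is visible here: req-4173ce85 starts waiting for "compute_resources" at 625.659294 and only acquires it 0.268s later (just below), because req-71fc0dfb holds the lock for 0.274s while its claim is tested. A much simplified model of what happens inside that critical section (a toy, not Nova's ResourceTracker):

    import threading

    class MiniResourceTracker:
        """Toy claim tracker; the real logic is nova.compute.resource_tracker."""
        def __init__(self, vcpus: int, memory_mb: int):
            self._lock = threading.Lock()  # plays the "compute_resources" role
            self.free = {'vcpus': vcpus, 'memory_mb': memory_mb}

        def instance_claim(self, flavor: dict) -> None:
            # Concurrent claims serialize here, hence the 0.268s wait below.
            with self._lock:
                if (flavor['vcpus'] > self.free['vcpus'] or
                        flavor['memory_mb'] > self.free['memory_mb']):
                    raise RuntimeError('insufficient resources')
                self.free['vcpus'] -= flavor['vcpus']
                self.free['memory_mb'] -= flavor['memory_mb']

    rt = MiniResourceTracker(vcpus=192, memory_mb=196078)
    rt.instance_claim({'vcpus': 1, 'memory_mb': 128})  # one m1.nano
    print(rt.free)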
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 625.927756] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.268s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.929102] env[68673]: INFO nova.compute.claims [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.965644] env[68673]: DEBUG nova.compute.utils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.967661] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 625.967883] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 625.980284] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 626.075762] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 626.116235] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 626.116416] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 626.116564] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.116747] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 626.116891] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.117046] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 626.117268] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 626.117483] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 626.117771] env[68673]: DEBUG nova.virt.hardware [None 
req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.117771] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.117999] env[68673]: DEBUG nova.virt.hardware [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.119126] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a9ff34-56b4-46a8-a6c6-c7de230ee3c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.130050] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e70b6e-1bcb-4364-b987-7add87b2148b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.148962] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16c0c8c-ae44-438b-a092-653de9a00143 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.159592] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ece21b-62a2-4ef4-a381-8139b6987081 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.165289] env[68673]: DEBUG nova.policy [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '600bb3bd1ede414eb05a1e98d7cb8037', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21020a1eaa8b4154b72028b6fcf37c64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 626.199185] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148ae305-5d1d-4658-83fe-e4a1e0a2517a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.207709] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1beed65-c16a-4ca4-8efe-dd81b1db2327 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.222318] env[68673]: DEBUG nova.compute.provider_tree [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] 
Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.236530] env[68673]: DEBUG nova.scheduler.client.report [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 626.254483] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.328s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.254975] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 626.263595] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Successfully created port: 90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.310038] env[68673]: DEBUG nova.compute.utils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.319136] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Allocating IP information in the background. 
{{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 626.319136] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 626.338397] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 626.450202] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 626.466700] env[68673]: DEBUG nova.policy [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f3c7eb579b14a9eb76bdadf9a559a92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9d3af6cdb4f4f53886344d55e09f557', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 626.484703] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 626.484947] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 626.485120] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b 
tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.485309] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 626.485454] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.485600] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 626.485803] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 626.485971] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 626.486229] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.486653] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.486653] env[68673]: DEBUG nova.virt.hardware [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.487509] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab9375e-61ae-4112-a2f4-67c3115e1ede {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.497096] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78cd7f1-3d37-4557-a13c-9fcede82a306 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.683179] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Successfully updated port: 78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.702656] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.702793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquired lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.702945] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.859713] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.915400] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.915515] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.933087] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 626.994237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.994480] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.996285] env[68673]: INFO nova.compute.claims [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.145553] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e350299-993e-439d-a27b-3c51a532b48d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.154024] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dad862-67eb-41ef-9fb6-5386681be4aa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.190355] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef697e1c-05ec-4300-9ba4-97b9bda4f81a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.199016] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d43217-8dac-447c-a1d0-f499f09617ca {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.215321] env[68673]: DEBUG nova.compute.provider_tree [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.224351] env[68673]: DEBUG nova.scheduler.client.report [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
627.240729] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.246s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.241233] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 627.275776] env[68673]: DEBUG nova.compute.utils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.277332] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 627.277474] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.291692] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 627.364995] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Successfully created port: 969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.392825] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 627.429761] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 627.429761] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 627.429906] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.430114] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 627.430182] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.430278] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 627.430555] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 627.430634] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 627.430875] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 627.430963] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 627.431700] env[68673]: DEBUG nova.virt.hardware [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 627.436076] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c98db32-c626-4f31-980e-71925339d394 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.447298] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f53a9cc-e1cc-4cd6-8b44-b75ea994e0cc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.591452] env[68673]: DEBUG nova.policy [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aefe106df4e44e29b0c33b6420d06d95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9003bf63f3a4be292ec435f900e4c17', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 627.907839] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Updating instance_info_cache with network_info: [{"id": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "address": "fa:16:3e:5d:dd:13", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": 
"nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78aad8b1-f2", "ovs_interfaceid": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.927849] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Releasing lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.927849] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Instance network_info: |[{"id": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "address": "fa:16:3e:5d:dd:13", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78aad8b1-f2", "ovs_interfaceid": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 627.928015] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:dd:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78aad8b1-f202-4199-ac33-69ae2df6a2ed', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 627.945644] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 627.946904] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04f73b32-8356-495d-bec3-4d2041ae7d99 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.959085] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Created folder: OpenStack in parent group-v4. [ 627.959273] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating folder: Project (34808c6750ed4b52ba1c78bdcb7d1163). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 627.959498] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccb0ca44-d46a-49cd-8d35-ff5e736f402b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.968807] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Created folder: Project (34808c6750ed4b52ba1c78bdcb7d1163) in parent group-v685311. [ 627.968996] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating folder: Instances. Parent ref: group-v685312. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 627.969236] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a1919d6-4a37-44a4-8ac0-2de2a78ebeff {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.977929] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Created folder: Instances in parent group-v685312. [ 627.978203] env[68673]: DEBUG oslo.service.loopingcall [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.978390] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 627.978934] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-069dcefe-1470-40ab-873b-b19aee5015f0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.000485] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.000485] env[68673]: value = "task-3433409" [ 628.000485] env[68673]: _type = "Task" [ 628.000485] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.011425] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433409, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.353363] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Successfully created port: 3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.390230] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "02517d31-0830-4e75-bde3-5f2e939f1328" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.390772] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "02517d31-0830-4e75-bde3-5f2e939f1328" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.405295] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 628.478208] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.478208] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.478897] env[68673]: INFO nova.compute.claims [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.512253] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433409, 'name': CreateVM_Task, 'duration_secs': 0.340769} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.512459] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 628.524446] env[68673]: DEBUG oslo_vmware.service [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b24bdcd-05ea-4d09-8ce7-055b1175e58e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.531870] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.532022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.533666] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 628.535274] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a6f04ff-29e3-4f72-ada1-f014c4a4a898 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.543435] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Waiting for the task: (returnval){ [ 628.543435] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529c4dcd-2d9d-e3e3-8e3e-b40538434e5a" [ 628.543435] env[68673]: _type = "Task" [ 628.543435] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.552483] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529c4dcd-2d9d-e3e3-8e3e-b40538434e5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.647623] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12551f20-5d58-4bce-a3cd-1b0b94811aa7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.655147] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea0788e-ca05-42a0-8928-8b126c1025a7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.689042] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e8dc1b-4338-4bfe-bdb4-7cf39295f18b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.696430] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86316b2-909c-465a-bcb2-bd90b1485366 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.709935] env[68673]: DEBUG nova.compute.provider_tree [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.722763] env[68673]: DEBUG nova.scheduler.client.report [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 628.743675] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.744376] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 628.797811] env[68673]: DEBUG nova.compute.utils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.799159] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 628.799320] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 628.814840] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 628.914834] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 628.920672] env[68673]: DEBUG nova.compute.manager [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Received event network-vif-plugged-78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 628.920880] env[68673]: DEBUG oslo_concurrency.lockutils [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] Acquiring lock "f74fc310-4045-448f-93f6-96196d5f38b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.921265] env[68673]: DEBUG oslo_concurrency.lockutils [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] Lock "f74fc310-4045-448f-93f6-96196d5f38b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.921515] env[68673]: DEBUG oslo_concurrency.lockutils [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] Lock "f74fc310-4045-448f-93f6-96196d5f38b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.921615] env[68673]: DEBUG nova.compute.manager [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] No waiting events found dispatching network-vif-plugged-78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 628.922051] env[68673]: WARNING nova.compute.manager [req-9e8a590a-42b0-44c9-b21e-4eaf4a1e65f3 req-83d816e3-a9d4-4fc9-b3b4-c7b9b7f76cfe service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Received unexpected event network-vif-plugged-78aad8b1-f202-4199-ac33-69ae2df6a2ed for instance with vm_state building and task_state spawning. 
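The entries directly above capture Nova's external-event handshake end to end: Neutron delivers network-vif-plugged-78aad8b1-f202-4199-ac33-69ae2df6a2ed for instance f74fc310-4045-448f-93f6-96196d5f38b2, the compute manager serializes on the per-instance "f74fc310-...-events" lock, finds no registered waiter ("No waiting events found dispatching"), and logs the WARNING about an unexpected event, which is benign while the instance is still in vm_state building / task_state spawning. A minimal sketch of that waiter pattern, using only the Python standard library; the class and method names below are illustrative stand-ins, not Nova's actual InstanceEvents API:

    import threading
    from collections import defaultdict

    class InstanceEventWaiters:
        """Match externally delivered events (network-vif-plugged, ...) to waiters."""

        def __init__(self):
            self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)  # {instance_uuid: {event_name: Event}}

        def prepare(self, instance_uuid, event_name):
            """Register interest *before* starting the operation that fires the event."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Called when the event arrives; wakes the waiter or reports a stray event."""
            with self._lock:
                waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if waiter is None:
                # Corresponds to the "Received unexpected event ..." WARNING above.
                print(f"unexpected event {event_name} for {instance_uuid}")
            else:
                waiter.set()

The spawning side would call prepare() before plugging the VIF and then block on the returned waiter with a timeout; in the run above the port went active before any waiter was registered, so dispatch takes the warning branch instead.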
[ 628.953896] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.954191] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.954367] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.954556] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.954701] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.954845] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.955064] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.955229] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.955766] env[68673]: 
DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.955766] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.955766] env[68673]: DEBUG nova.virt.hardware [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.957101] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049ef4df-438c-4c19-a01a-d8a5e6f1ac08 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.965362] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6ba1f2-ef5b-4241-b57b-e2cd8a4de32e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.054140] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.054453] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.054684] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.054826] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.055248] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.055509] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e117e49-fe3e-4650-8300-9bf908c4fb8f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.073636] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.073826] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 629.074690] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92be260b-e821-4388-835c-db7352401e7f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.081862] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c660e2f-dee8-414e-bb2c-6459f9a9187d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.087685] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Waiting for the task: (returnval){ [ 629.087685] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5267e8e0-4b5f-563e-bf66-42a06fe5fde6" [ 629.087685] env[68673]: _type = "Task" [ 629.087685] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.095710] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5267e8e0-4b5f-563e-bf66-42a06fe5fde6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.120166] env[68673]: DEBUG nova.policy [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c89f5966a6a4344aba8eebde540c40e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f22569fb7ad84645814cee49e6895dcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 629.174478] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Successfully updated port: 969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.191399] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.191521] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquired lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.191669] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.325918] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Successfully created port: f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.512463] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.603176] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 629.603176] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating directory with path [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.603176] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab90459c-cb7f-4892-b454-793f05858820 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.623269] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Created directory with path [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.624899] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Fetch image to [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 629.626693] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 629.626693] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6381df-2f87-470a-96c4-d2291196bce0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.635200] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac31af8c-39c8-4540-8144-3b38a2a6fd07 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.653009] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca922a17-d98b-485b-afd1-f6c8dd391b6e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.696511] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c09e18e1-0fe2-4811-813d-97ea0340a00a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.701312] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e19d9bb0-53f4-4e0e-93b1-5693097a4259 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.733101] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 629.825797] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 629.891882] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Successfully updated port: 90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.896029] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 629.896029] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 629.913588] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.913588] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.913708] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 630.017292] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Updating instance_info_cache with network_info: [{"id": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "address": "fa:16:3e:6d:e8:24", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969c0bca-8b", "ovs_interfaceid": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.041942] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Releasing lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.042072] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Instance network_info: |[{"id": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "address": "fa:16:3e:6d:e8:24", "network": {"id": 
"f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969c0bca-8b", "ovs_interfaceid": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 630.042745] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:e8:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '969c0bca-8b82-4de1-a094-bbd1ce7f7c83', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.061908] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Creating folder: Project (a9d3af6cdb4f4f53886344d55e09f557). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 630.062898] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c34fe9b7-7bb7-4d9f-b87f-4bd437afd32a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.077850] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Created folder: Project (a9d3af6cdb4f4f53886344d55e09f557) in parent group-v685311. [ 630.077850] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Creating folder: Instances. Parent ref: group-v685315. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 630.077850] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a359d3c-eab3-464d-8792-da2b1d3b87bc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.085033] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.100612] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Created folder: Instances in parent group-v685315. [ 630.100612] env[68673]: DEBUG oslo.service.loopingcall [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.100612] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 630.100612] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8d0e96f-57de-4d9f-8568-ef3ce82c46dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.126595] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.126595] env[68673]: value = "task-3433412" [ 630.126595] env[68673]: _type = "Task" [ 630.126595] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.135208] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433412, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.639698] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433412, 'name': CreateVM_Task, 'duration_secs': 0.343072} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.640014] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 630.641031] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.641031] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.641176] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 630.641309] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d99eb5af-7d23-43e9-90ad-b8f96cb93bd9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.645859] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Waiting for the task: (returnval){ [ 630.645859] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ef69d6-ee69-c00c-ec9b-3cfb527c138f" [ 630.645859] env[68673]: _type = "Task" [ 630.645859] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.653865] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ef69d6-ee69-c00c-ec9b-3cfb527c138f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.908946] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Successfully created port: 40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 630.956039] env[68673]: DEBUG nova.compute.manager [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Received event network-vif-plugged-969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 630.956257] env[68673]: DEBUG oslo_concurrency.lockutils [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] Acquiring lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.957224] env[68673]: DEBUG oslo_concurrency.lockutils [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] Lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.957224] env[68673]: DEBUG oslo_concurrency.lockutils [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] Lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.957224] env[68673]: DEBUG nova.compute.manager [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] No waiting events found dispatching network-vif-plugged-969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 630.957224] env[68673]: WARNING nova.compute.manager [req-9674e65d-7586-4888-92bc-6d8e0e7b6228 req-63e0cf5c-2d81-43cd-af5c-a0544c36d83d service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Received unexpected event network-vif-plugged-969c0bca-8b82-4de1-a094-bbd1ce7f7c83 for instance with vm_state building and task_state spawning. 
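The recurring `Waiting for the task: (returnval){ ... } to complete` / `progress is 0%.` / `completed successfully` triples throughout this trace (e.g. the SearchDatastore_Task poll at the top of this stretch) come from oslo.vmware's task-polling loop, `wait_for_task` at oslo_vmware/api.py:397 driving `_poll_task` at :434/:444. Below is a minimal sketch of that loop under stated assumptions: `poll` is a hypothetical stand-in for the vCenter TaskInfo query, `print` stands in for DEBUG logging, and the real code runs the poll on a looping call with fuller state and error handling:

```python
import time

def wait_for_task(poll, task_id, interval=0.5):
    """Poll until the task succeeds; raise if it ends in any other state."""
    while True:
        state, progress = poll()                      # stand-in for a TaskInfo read
        if state == "running":
            print(f"Task: {task_id} progress is {progress}%.")
        elif state == "success":
            print(f"Task: {task_id} completed successfully.")
            return
        else:
            raise RuntimeError(f"task {task_id} finished in state {state!r}")
        time.sleep(interval)

# Demo mirroring CreateVM_Task 'task-3433415' above: one 0% poll, then
# success after roughly the logged duration_secs of ~0.31s.
states = iter([("running", 0), ("success", 100)])
wait_for_task(lambda: next(states), "task-3433415")
```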
[ 631.034204] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Updating instance_info_cache with network_info: [{"id": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "address": "fa:16:3e:fa:cc:cb", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b420cc-46", "ovs_interfaceid": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.054226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.054675] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Instance network_info: |[{"id": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "address": "fa:16:3e:fa:cc:cb", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b420cc-46", "ovs_interfaceid": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 631.055408] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:cc:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90b420cc-46c8-4ef3-a2b8-a4502286e719', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.073587] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating folder: Project (eaf47d7aa4b64fa58f8c1b3f1fcc02df). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 631.073587] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e80a3e7-73d5-4707-a045-792fc9620f4e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.083721] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created folder: Project (eaf47d7aa4b64fa58f8c1b3f1fcc02df) in parent group-v685311. [ 631.085033] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating folder: Instances. Parent ref: group-v685318. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 631.085033] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11b9500e-8dbb-49dc-a9df-286eb48ef3f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.095768] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created folder: Instances in parent group-v685318. [ 631.097392] env[68673]: DEBUG oslo.service.loopingcall [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.098562] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 631.099544] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-408342a5-ae64-4138-8cf7-13d840622c28 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.129022] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.129022] env[68673]: value = "task-3433415" [ 631.129022] env[68673]: _type = "Task" [ 631.129022] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.137456] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433415, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.159185] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.159185] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.159793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.639559] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433415, 'name': CreateVM_Task, 'duration_secs': 0.312298} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.640074] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 631.640762] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.640938] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.641301] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 631.641590] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-409ee616-0f58-4fc3-841a-661cc6b96ca1 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.651560] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 631.651560] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5258c6f5-916a-f126-79d4-1655e834b16e" [ 631.651560] env[68673]: _type = "Task" [ 631.651560] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.667560] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5258c6f5-916a-f126-79d4-1655e834b16e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.687829] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Successfully updated port: 3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 631.703275] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.703404] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquired lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.703551] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 631.829369] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.167331] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.167331] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.167659] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.402950] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Successfully updated port: f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.416337] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.416508] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquired lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.416660] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.546643] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.550536] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Updating instance_info_cache with network_info: [{"id": "3dc909ba-349e-4946-a978-33ef591e8612", "address": "fa:16:3e:1d:b2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc909ba-34", "ovs_interfaceid": "3dc909ba-349e-4946-a978-33ef591e8612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.566052] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Releasing lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.566350] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance network_info: |[{"id": "3dc909ba-349e-4946-a978-33ef591e8612", "address": "fa:16:3e:1d:b2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc909ba-34", "ovs_interfaceid": "3dc909ba-349e-4946-a978-33ef591e8612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 632.566771] env[68673]: DEBUG nova.virt.vmwareapi.vmops 
[None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:b2:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dc909ba-349e-4946-a978-33ef591e8612', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 632.578625] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Creating folder: Project (21020a1eaa8b4154b72028b6fcf37c64). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 632.578812] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88f33fa2-4f7b-403e-a720-4b05f13eb3bc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.589988] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Created folder: Project (21020a1eaa8b4154b72028b6fcf37c64) in parent group-v685311. [ 632.590064] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Creating folder: Instances. Parent ref: group-v685321. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 632.590302] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b65c3c1-f65b-4955-8eca-4887ba43e206 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.601825] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Created folder: Instances in parent group-v685321. [ 632.602096] env[68673]: DEBUG oslo.service.loopingcall [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 632.603601] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 632.603601] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0be980f2-2d96-4756-a014-8024dbda44c6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.628802] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 632.628802] env[68673]: value = "task-3433418" [ 632.628802] env[68673]: _type = "Task" [ 632.628802] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.639548] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433418, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.963282] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Updating instance_info_cache with network_info: [{"id": "f493af11-3d45-4935-ab09-fae40e33ddb1", "address": "fa:16:3e:67:51:e2", "network": {"id": "9fa2ef8c-28a6-4f51-985b-4fbcfe05e131", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-358580204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9003bf63f3a4be292ec435f900e4c17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493af11-3d", "ovs_interfaceid": "f493af11-3d45-4935-ab09-fae40e33ddb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.970986] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Received event network-changed-78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 632.972084] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Refreshing instance network info cache due to event network-changed-78aad8b1-f202-4199-ac33-69ae2df6a2ed. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 632.973273] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Acquiring lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.973443] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Acquired lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.973616] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Refreshing network info cache for port 78aad8b1-f202-4199-ac33-69ae2df6a2ed {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 632.985578] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Releasing lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.985578] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance network_info: |[{"id": "f493af11-3d45-4935-ab09-fae40e33ddb1", "address": "fa:16:3e:67:51:e2", "network": {"id": "9fa2ef8c-28a6-4f51-985b-4fbcfe05e131", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-358580204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9003bf63f3a4be292ec435f900e4c17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493af11-3d", "ovs_interfaceid": "f493af11-3d45-4935-ab09-fae40e33ddb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 632.990381] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:51:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f493af11-3d45-4935-ab09-fae40e33ddb1', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.002941] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Creating folder: Project (c9003bf63f3a4be292ec435f900e4c17). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 633.003951] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8da5c10e-0d6c-4cec-813b-37f66ecca5de {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.019013] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Created folder: Project (c9003bf63f3a4be292ec435f900e4c17) in parent group-v685311. [ 633.019214] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Creating folder: Instances. Parent ref: group-v685324. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 633.019445] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9645903a-a4f6-48d2-a092-9455aa65c6e8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.028734] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Created folder: Instances in parent group-v685324. [ 633.028734] env[68673]: DEBUG oslo.service.loopingcall [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.028895] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 633.029366] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19313724-2d4a-4589-b2a2-dd5c02d6087a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.050139] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.050139] env[68673]: value = "task-3433421" [ 633.050139] env[68673]: _type = "Task" [ 633.050139] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.059478] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433421, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.141886] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433418, 'name': CreateVM_Task, 'duration_secs': 0.31484} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.142052] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 633.142739] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.142895] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.143276] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 633.143844] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91bf4b93-7726-4964-b932-3766c85b2c1e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.148971] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for the task: (returnval){ [ 633.148971] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52164bb5-0249-39a9-fb1c-09f265416d89" [ 633.148971] env[68673]: _type = "Task" [ 633.148971] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.157827] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52164bb5-0249-39a9-fb1c-09f265416d89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.257915] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Successfully updated port: 40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.275061] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.275243] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquired lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.275436] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.450945] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.563346] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433421, 'name': CreateVM_Task, 'duration_secs': 0.391} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.564142] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 633.564142] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.661940] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.662360] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.662549] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.662914] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.663195] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 633.663525] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baeebac0-57c5-45ab-a14f-7d975356fe68 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.670491] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for the task: (returnval){ [ 633.670491] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524a889d-22cb-8646-be5a-331f861848e3" [ 633.670491] env[68673]: _type = "Task" [ 633.670491] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.681864] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524a889d-22cb-8646-be5a-331f861848e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.124877] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Updated VIF entry in instance network info cache for port 78aad8b1-f202-4199-ac33-69ae2df6a2ed. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 634.125292] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Updating instance_info_cache with network_info: [{"id": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "address": "fa:16:3e:5d:dd:13", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78aad8b1-f2", "ovs_interfaceid": "78aad8b1-f202-4199-ac33-69ae2df6a2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.139321] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Releasing lock "refresh_cache-f74fc310-4045-448f-93f6-96196d5f38b2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.139887] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Received event network-vif-plugged-90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 634.140242] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Acquiring lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.140503] env[68673]: DEBUG 
oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.140812] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.141058] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] No waiting events found dispatching network-vif-plugged-90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 634.141541] env[68673]: WARNING nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Received unexpected event network-vif-plugged-90b420cc-46c8-4ef3-a2b8-a4502286e719 for instance with vm_state building and task_state spawning. [ 634.141835] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Received event network-changed-90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 634.142056] env[68673]: DEBUG nova.compute.manager [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Refreshing instance network info cache due to event network-changed-90b420cc-46c8-4ef3-a2b8-a4502286e719. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 634.142291] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Acquiring lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.142467] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Acquired lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.142717] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Refreshing network info cache for port 90b420cc-46c8-4ef3-a2b8-a4502286e719 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 634.185266] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.185743] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.186100] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.239771] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Updating instance_info_cache with network_info: [{"id": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "address": "fa:16:3e:05:3f:31", "network": {"id": "af1b6dfc-0345-4626-b25b-6ad60139951e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1302122046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f22569fb7ad84645814cee49e6895dcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": 
"nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c66ea4-2b", "ovs_interfaceid": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.260022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Releasing lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.260022] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance network_info: |[{"id": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "address": "fa:16:3e:05:3f:31", "network": {"id": "af1b6dfc-0345-4626-b25b-6ad60139951e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1302122046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f22569fb7ad84645814cee49e6895dcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c66ea4-2b", "ovs_interfaceid": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 634.260467] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:3f:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afd3feb3-ffcc-4499-a2c2-eb6a48aefde9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40c66ea4-2bb9-4b7f-bec7-cc23266f685b', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.271406] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Creating folder: Project (f22569fb7ad84645814cee49e6895dcb). Parent ref: group-v685311. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.271406] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7105e82b-cd1a-4e83-8b44-ba18c64b186f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.281658] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Created folder: Project (f22569fb7ad84645814cee49e6895dcb) in parent group-v685311. [ 634.281848] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Creating folder: Instances. Parent ref: group-v685327. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.282138] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-848ff3b3-54cb-4e0f-9de5-2deff7bc62de {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.292567] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Created folder: Instances in parent group-v685327. [ 634.292812] env[68673]: DEBUG oslo.service.loopingcall [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.293463] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 634.293999] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eabeac9-6f5c-4103-b220-2d60568556fe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.318745] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.318745] env[68673]: value = "task-3433424" [ 634.318745] env[68673]: _type = "Task" [ 634.318745] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.328992] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433424, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.508643] env[68673]: DEBUG nova.compute.manager [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Received event network-changed-969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 634.509453] env[68673]: DEBUG nova.compute.manager [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Refreshing instance network info cache due to event network-changed-969c0bca-8b82-4de1-a094-bbd1ce7f7c83. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 634.509453] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] Acquiring lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.509453] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] Acquired lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.509453] env[68673]: DEBUG nova.network.neutron [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Refreshing network info cache for port 969c0bca-8b82-4de1-a094-bbd1ce7f7c83 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 634.833631] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433424, 'name': CreateVM_Task, 'duration_secs': 0.309427} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.833906] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 634.834620] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.834775] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.835088] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 634.835637] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42f5c529-2be2-4107-9dc7-96768beb4aac {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.842491] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for the task: (returnval){ [ 634.842491] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52394214-0447-d181-afc0-446ac54e3dc2" [ 634.842491] env[68673]: _type = "Task" [ 634.842491] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.857640] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52394214-0447-d181-afc0-446ac54e3dc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.871232] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Updated VIF entry in instance network info cache for port 90b420cc-46c8-4ef3-a2b8-a4502286e719. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 634.871422] env[68673]: DEBUG nova.network.neutron [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Updating instance_info_cache with network_info: [{"id": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "address": "fa:16:3e:fa:cc:cb", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b420cc-46", "ovs_interfaceid": "90b420cc-46c8-4ef3-a2b8-a4502286e719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.886025] env[68673]: DEBUG oslo_concurrency.lockutils [req-07ee0011-1c3c-462c-a764-022bc08caeba req-ee875d14-c8f3-4c0b-ac74-f79e9ec65050 service nova] Releasing lock "refresh_cache-d7ad9f48-d538-4bc4-b911-6e564cd5f457" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.082345] env[68673]: DEBUG nova.network.neutron [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Updated VIF entry in instance network info cache for port 969c0bca-8b82-4de1-a094-bbd1ce7f7c83. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.082694] env[68673]: DEBUG nova.network.neutron [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Updating instance_info_cache with network_info: [{"id": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "address": "fa:16:3e:6d:e8:24", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969c0bca-8b", "ovs_interfaceid": "969c0bca-8b82-4de1-a094-bbd1ce7f7c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.095453] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf832c20-0b01-476e-bb2e-983225f886aa req-139b6927-281f-4e83-b235-2e8377274b49 service nova] Releasing lock "refresh_cache-7fd1f100-addc-4319-acf8-13f19a4f7b3c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.354368] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.354970] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.354970] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.184055] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Received event network-vif-plugged-3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 
636.187667] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.188877] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.188877] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.188877] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] No waiting events found dispatching network-vif-plugged-3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 636.188877] env[68673]: WARNING nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Received unexpected event network-vif-plugged-3dc909ba-349e-4946-a978-33ef591e8612 for instance with vm_state building and task_state spawning. [ 636.189383] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Received event network-changed-3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 636.189383] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Refreshing instance network info cache due to event network-changed-3dc909ba-349e-4946-a978-33ef591e8612. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 636.189383] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.189383] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquired lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.189383] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Refreshing network info cache for port 3dc909ba-349e-4946-a978-33ef591e8612 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 636.573025] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Updated VIF entry in instance network info cache for port 3dc909ba-349e-4946-a978-33ef591e8612. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 636.573428] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Updating instance_info_cache with network_info: [{"id": "3dc909ba-349e-4946-a978-33ef591e8612", "address": "fa:16:3e:1d:b2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc909ba-34", "ovs_interfaceid": "3dc909ba-349e-4946-a978-33ef591e8612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.584054] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Releasing lock "refresh_cache-bfd5ac40-9e14-473a-8f14-895534a4642e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.584310] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Received event 
network-vif-plugged-f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 636.584493] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.584679] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.584866] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.585106] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] No waiting events found dispatching network-vif-plugged-f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 636.585286] env[68673]: WARNING nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Received unexpected event network-vif-plugged-f493af11-3d45-4935-ab09-fae40e33ddb1 for instance with vm_state building and task_state spawning. [ 636.586200] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Received event network-changed-f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 636.586200] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Refreshing instance network info cache due to event network-changed-f493af11-3d45-4935-ab09-fae40e33ddb1. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 636.586200] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.586200] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquired lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.586691] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Refreshing network info cache for port f493af11-3d45-4935-ab09-fae40e33ddb1 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 637.241437] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Updated VIF entry in instance network info cache for port f493af11-3d45-4935-ab09-fae40e33ddb1. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 637.242150] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Updating instance_info_cache with network_info: [{"id": "f493af11-3d45-4935-ab09-fae40e33ddb1", "address": "fa:16:3e:67:51:e2", "network": {"id": "9fa2ef8c-28a6-4f51-985b-4fbcfe05e131", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-358580204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9003bf63f3a4be292ec435f900e4c17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493af11-3d", "ovs_interfaceid": "f493af11-3d45-4935-ab09-fae40e33ddb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.259187] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Releasing lock "refresh_cache-de59505b-0bbf-41b4-8d06-65ab40e8a5a8" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.259680] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Received 
event network-vif-plugged-40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 637.260044] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "02517d31-0830-4e75-bde3-5f2e939f1328-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.261244] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "02517d31-0830-4e75-bde3-5f2e939f1328-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.261244] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Lock "02517d31-0830-4e75-bde3-5f2e939f1328-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.261569] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] No waiting events found dispatching network-vif-plugged-40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 637.262134] env[68673]: WARNING nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Received unexpected event network-vif-plugged-40c66ea4-2bb9-4b7f-bec7-cc23266f685b for instance with vm_state building and task_state spawning. [ 637.262134] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Received event network-changed-40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 637.262756] env[68673]: DEBUG nova.compute.manager [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Refreshing instance network info cache due to event network-changed-40c66ea4-2bb9-4b7f-bec7-cc23266f685b. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 637.263279] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquiring lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.264039] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Acquired lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.264039] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Refreshing network info cache for port 40c66ea4-2bb9-4b7f-bec7-cc23266f685b {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 637.679470] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Updated VIF entry in instance network info cache for port 40c66ea4-2bb9-4b7f-bec7-cc23266f685b. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 637.679835] env[68673]: DEBUG nova.network.neutron [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Updating instance_info_cache with network_info: [{"id": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "address": "fa:16:3e:05:3f:31", "network": {"id": "af1b6dfc-0345-4626-b25b-6ad60139951e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1302122046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f22569fb7ad84645814cee49e6895dcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c66ea4-2b", "ovs_interfaceid": "40c66ea4-2bb9-4b7f-bec7-cc23266f685b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.694973] env[68673]: DEBUG oslo_concurrency.lockutils [req-9a7de501-e2bb-49bb-b207-349c2b369415 req-91ea7aac-573e-4220-b0c5-3ba80dfabfe2 service nova] Releasing lock "refresh_cache-02517d31-0830-4e75-bde3-5f2e939f1328" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.793810] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.794485] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.795071] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 637.795273] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 637.826144] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.826367] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.827773] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.827773] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.827773] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.827773] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 637.827773] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 637.827985] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.827985] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.828329] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.828483] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.829585] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.829585] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.829585] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 637.829585] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.856829] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.857133] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.857306] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.857457] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 637.860995] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3fe5e4-0eee-4f12-a774-63d8ae7d11cc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.870706] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a14d459-5eff-4220-9aa5-7149927c2284 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.890067] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f06c93-27e9-4d2d-904e-cec0e9af8a72 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.897860] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e179bb6e-4443-49f9-9e07-9617030ae0e6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.932581] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180912MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 637.932778] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.932990] 
env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.024304] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f74fc310-4045-448f-93f6-96196d5f38b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.024374] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d7ad9f48-d538-4bc4-b911-6e564cd5f457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.024475] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfd5ac40-9e14-473a-8f14-895534a4642e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.024577] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7fd1f100-addc-4319-acf8-13f19a4f7b3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.024691] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.024803] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 02517d31-0830-4e75-bde3-5f2e939f1328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.054909] env[68673]: INFO nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d has allocations against this compute host but is not found in the database. 
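The six per-instance allocation records above, plus the 512 MB reserved under MEMORY_MB in the inventory reported a few entries below, account exactly for the totals in the final resource view that follows: used_vcpus = 6, used_ram = 512 + 6*128 = 1280 MB, used_disk = 6*1 = 6 GB. The orphaned allocation held by instance 377657cd-9913-49ec-a0f8-a701655ff68d is not counted; its build is only claimed later in the log. A minimal Python sketch of that bookkeeping, assuming allocation dicts shaped like the ones logged here (illustrative arithmetic only, not Nova's actual ResourceTracker code):

    # Per-instance placement allocations, as logged above: each instance holds
    # {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
    allocations = {
        "f74fc310-4045-448f-93f6-96196d5f38b2": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
        "d7ad9f48-d538-4bc4-b911-6e564cd5f457": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
        "bfd5ac40-9e14-473a-8f14-895534a4642e": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
        "7fd1f100-addc-4319-acf8-13f19a4f7b3c": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
        "de59505b-0bbf-41b4-8d06-65ab40e8a5a8": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
        "02517d31-0830-4e75-bde3-5f2e939f1328": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},
    }
    RESERVED_RAM_MB = 512  # 'reserved' under MEMORY_MB in the inventory logged below

    used_vcpus = sum(a["VCPU"] for a in allocations.values())
    used_ram_mb = RESERVED_RAM_MB + sum(a["MEMORY_MB"] for a in allocations.values())
    used_disk_gb = sum(a["DISK_GB"] for a in allocations.values())

    # Matches the final resource view logged next:
    # used_ram=1280MB used_disk=6GB used_vcpus=6
    print(used_vcpus, used_ram_mb, used_disk_gb)  # -> 6 1280 6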
[ 638.054909] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 638.054909] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 638.185503] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70df63b3-c91f-4961-9e0b-c6a2ba4c4647 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.195882] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "377657cd-9913-49ec-a0f8-a701655ff68d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.196741] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.201852] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c048e2-5446-4fbf-bc14-d4d12a065835 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.235754] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd07825-3eaf-4465-a7de-94de33102aea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.238547] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 638.247852] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f981f7-78f3-4985-bf50-e8524981a58b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.267733] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.287767] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.304162] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 638.304162] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.371s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.315956] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.316239] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.318127] env[68673]: INFO nova.compute.claims [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.390195] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.390433] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.404208] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 638.489757] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.551544] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ad9c76-5170-4dca-8ae6-0888b3872c7e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.559342] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47bd010-0e30-4459-a433-ae0727176830 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.592785] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6526df2b-9144-4718-883a-092915e999d7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.603269] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af1c293-abf2-47d4-a417-154e6d4bd3af {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.619027] env[68673]: DEBUG nova.compute.provider_tree [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.629829] env[68673]: DEBUG nova.scheduler.client.report [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.648299] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.332s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.648539] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 638.651042] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.162s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.652518] env[68673]: INFO nova.compute.claims [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.689891] env[68673]: DEBUG nova.compute.utils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.691217] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 638.691432] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.702472] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 638.785200] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 638.811063] env[68673]: DEBUG nova.policy [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7101467fab04677a301b22640fa41e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecf8f480418548d9b88933483b23b5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 638.815192] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.815440] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.818723] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=<?>,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-15T11:24:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.819129] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.819223] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.819337] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 
tempest-TenantUsagesTestJSON-436655659-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.819500] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.819647] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.819947] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.820053] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.820218] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.820462] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.820812] env[68673]: DEBUG nova.virt.hardware [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.822340] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ed6381-5dd3-47aa-a2a2-373116dac095 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.831913] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 638.835090] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dec4ec2-f241-4239-b4b4-090bf6f2da94 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.881631] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c2ea69-b049-451c-8ab3-90ce8b0cfca7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.889711] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78378ad6-91d7-482f-8b95-35b27879048b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.896037] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.923582] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a824689e-c08a-4bf3-b8cc-64f2139ba9b1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.930952] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43796391-b5cd-47d2-8fad-9e334533be33 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.945363] env[68673]: DEBUG nova.compute.provider_tree [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.958376] env[68673]: DEBUG nova.scheduler.client.report [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.978437] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.978993] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 
tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 638.981400] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.085s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.983160] env[68673]: INFO nova.compute.claims [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.024214] env[68673]: DEBUG nova.compute.utils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.025905] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 639.026124] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 639.036364] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 639.106175] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 639.135390] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=<?>,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-15T11:24:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.135519] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.135662] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.136538] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.136538] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.136538] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.136730] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.136767] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.136915] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 
tempest-MigrationsAdminTest-28536855-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.137087] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.137267] env[68673]: DEBUG nova.virt.hardware [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.138582] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8014e380-e422-497c-a129-7ad0711dde57 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.151383] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38131a47-601c-4e6e-aabd-b038c252b25d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.197793] env[68673]: DEBUG nova.policy [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f7098825a5f4469ae441d3dde1461fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afd7de5880f44f51a43d504b9c6fe8da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 639.216666] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dfd5a2-128a-4ecd-bf60-1b658cd2da25 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.224359] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fb5f22-2ade-4dcb-9e9c-bab38ea472ad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.257495] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0549bd92-1682-44ea-9cd5-b04b0602b995 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.265800] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ad6382-cad3-4985-97b9-0b20752eb39a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.279681] env[68673]: DEBUG nova.compute.provider_tree [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.289824] env[68673]: DEBUG nova.scheduler.client.report [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 639.311560] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.311690] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 639.372073] env[68673]: DEBUG nova.compute.utils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.374238] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 639.374553] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 639.386407] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 639.470978] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 639.496550] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=<?>,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-15T11:24:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.496929] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.497217] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.497217] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.497335] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.497440] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.497681] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.497817] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.497982] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Got 1 possible 
topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.498158] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.498327] env[68673]: DEBUG nova.virt.hardware [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.499203] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d95046-1a27-41ee-a735-ed17ea7b7883 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.508579] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21046901-b01c-4ef9-94ca-6ecc2c7db2ae {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.591497] env[68673]: DEBUG nova.policy [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b0ed7aa0f5c413d9cc32a8bbf4724df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92e8efd351c449e8815c0ec3b6070d20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 639.696727] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Successfully created port: ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.952332] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "31700289-ac8c-47a9-b4e0-981b5c9df645" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.952673] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.964408] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 
tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 640.031428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.031428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.033945] env[68673]: INFO nova.compute.claims [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.256863] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e68364-9902-4c80-829b-a931ce84cae1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.262248] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea050a5-9c36-445c-974e-570a550c6dff {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.293875] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87749191-73cf-45e8-bd0f-83377ee22ff5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.301646] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea120fa-b0ec-485b-99d5-0170fcd1e2be {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.317179] env[68673]: DEBUG nova.compute.provider_tree [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.327122] env[68673]: DEBUG nova.scheduler.client.report [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 640.346720] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.347216] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 640.399657] env[68673]: DEBUG nova.compute.utils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.400785] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 640.400955] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 640.414945] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 640.500330] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 640.533130] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 640.533130] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 640.533581] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.533903] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 640.534026] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.534166] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 640.534378] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 640.534537] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 640.534703] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 640.534868] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 640.535061] env[68673]: DEBUG nova.virt.hardware [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 640.536716] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dd4323-cf1c-409d-a0ad-9f3bc1e1da49 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.545843] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73ec87e-2014-4e66-a1d8-adfa05f1ac36 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.624011] env[68673]: DEBUG nova.policy [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c77345f98bb460bbc3fdc689f588e99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a985518815e0473a8648d59a868dbfab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 640.630230] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Successfully created port: e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.006509] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Successfully created port: 21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.137284] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Successfully created port: 2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 642.504511] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Successfully updated port: ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.515055] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.515217] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquired lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.515415] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.824813] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.396340] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.396744] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.169998] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Updating instance_info_cache with network_info: [{"id": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "address": "fa:16:3e:b8:2a:d8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac691c8e-e9", "ovs_interfaceid": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.185852] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Successfully updated port: e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 644.187863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Releasing lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.188724] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance:
377657cd-9913-49ec-a0f8-a701655ff68d] Instance network_info: |[{"id": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "address": "fa:16:3e:b8:2a:d8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac691c8e-e9", "ovs_interfaceid": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 644.193735] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:2a:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac691c8e-e9d5-4cde-a6e6-e06129944a9d', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.205162] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Creating folder: Project (ecf8f480418548d9b88933483b23b5f8). Parent ref: group-v685311. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 644.207367] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88b3ae5d-4f9f-4b19-892d-55ef9fd91b26 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.208649] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.208800] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.208948] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 644.226113] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Created folder: Project (ecf8f480418548d9b88933483b23b5f8) in parent group-v685311. [ 644.226434] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Creating folder: Instances. Parent ref: group-v685330. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 644.227025] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3cae602-9521-4f2f-be11-d002ba30a1a2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.245519] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Created folder: Instances in parent group-v685330. [ 644.246431] env[68673]: DEBUG oslo.service.loopingcall [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.246431] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 644.246431] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f059585-7351-410a-8fd1-054ff7447588 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.275950] env[68673]: DEBUG nova.compute.manager [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Received event network-vif-plugged-ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 644.276096] env[68673]: DEBUG oslo_concurrency.lockutils [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] Acquiring lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.276308] env[68673]: DEBUG oslo_concurrency.lockutils [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] Lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.276504] env[68673]: DEBUG oslo_concurrency.lockutils [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] Lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.276686] env[68673]: DEBUG nova.compute.manager [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] No waiting events found dispatching network-vif-plugged-ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 644.276944] env[68673]: WARNING nova.compute.manager [req-1ca2bee5-244d-423c-baa4-42a4d8b73420 req-e4d8bc59-6dab-443c-9259-138403459609 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Received unexpected event network-vif-plugged-ac691c8e-e9d5-4cde-a6e6-e06129944a9d for instance with vm_state building and task_state spawning. [ 644.278755] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.278755] env[68673]: value = "task-3433427" [ 644.278755] env[68673]: _type = "Task" [ 644.278755] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.294938] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433427, 'name': CreateVM_Task} progress is 6%.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.317432] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.455218] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Successfully updated port: 21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 644.480738] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.483999] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.483999] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 644.646543] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.717170] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "579c535d-7061-4822-8f7f-50b36ddfd44b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.719702] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.794367] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433427, 'name': CreateVM_Task, 'duration_secs': 0.327316} completed successfully.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.794650] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 644.795346] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.797150] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.797675] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 644.798352] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-815bfb79-39e8-4d14-8451-9325c7b110c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.804320] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for the task: (returnval){ [ 644.804320] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5205b724-ad3e-f0f2-16a1-a7b84b062ba1" [ 644.804320] env[68673]: _type = "Task" [ 644.804320] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.813632] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5205b724-ad3e-f0f2-16a1-a7b84b062ba1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.325207] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.325207] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.325207] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.457452] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Updating instance_info_cache with network_info: [{"id": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "address": "fa:16:3e:0e:01:f9", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38cde06-15", "ovs_interfaceid": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.473589] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.474438] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance network_info: |[{"id": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "address": 
"fa:16:3e:0e:01:f9", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38cde06-15", "ovs_interfaceid": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 645.474828] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:01:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e38cde06-158b-4c6f-8fac-d7220cab1c2b', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.486420] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating folder: Project (afd7de5880f44f51a43d504b9c6fe8da). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.487057] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e52ac15c-f20b-46f3-965a-be2212e8413a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.502843] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created folder: Project (afd7de5880f44f51a43d504b9c6fe8da) in parent group-v685311. [ 645.503075] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating folder: Instances. Parent ref: group-v685333. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.503596] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee2d63ab-52d2-487d-a905-d47a367b7dc3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.512881] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created folder: Instances in parent group-v685333. 
[ 645.513160] env[68673]: DEBUG oslo.service.loopingcall [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.513364] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 645.513569] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b82471a-1c95-4b76-bc4d-1e2a7da03e5a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.533703] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.533703] env[68673]: value = "task-3433430" [ 645.533703] env[68673]: _type = "Task" [ 645.533703] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.543374] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433430, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.674077] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Updating instance_info_cache with network_info: [{"id": "21de1dd5-9c92-4d55-8f8c-805fc111448f", "address": "fa:16:3e:06:db:8b", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de1dd5-9c", "ovs_interfaceid": "21de1dd5-9c92-4d55-8f8c-805fc111448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.692305] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.692709] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance network_info: |[{"id": 
"21de1dd5-9c92-4d55-8f8c-805fc111448f", "address": "fa:16:3e:06:db:8b", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de1dd5-9c", "ovs_interfaceid": "21de1dd5-9c92-4d55-8f8c-805fc111448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 645.693482] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:db:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21de1dd5-9c92-4d55-8f8c-805fc111448f', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.701349] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating folder: Project (92e8efd351c449e8815c0ec3b6070d20). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.702019] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-457d76de-3a57-43a0-9218-2b2b104ab161 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.713567] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created folder: Project (92e8efd351c449e8815c0ec3b6070d20) in parent group-v685311. [ 645.713821] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating folder: Instances. Parent ref: group-v685336. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.714060] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a06defb-6340-4878-a922-7e8da167ebb7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.723937] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created folder: Instances in parent group-v685336. [ 645.724055] env[68673]: DEBUG oslo.service.loopingcall [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.725264] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 645.725264] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-390e85c5-306d-47fe-a5f8-88a748c57720 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.752638] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.752638] env[68673]: value = "task-3433433" [ 645.752638] env[68673]: _type = "Task" [ 645.752638] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.763357] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433433, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.989960] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Successfully updated port: 2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 646.007995] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.007995] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquired lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.007995] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 646.050452] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433430, 'name': CreateVM_Task, 'duration_secs': 0.333629} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.051496] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 646.052641] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.053369] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.053369] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 646.053369] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34024f1c-c3c2-45be-bda5-27e8b62e0a09 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.059024] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 646.059024] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529b24aa-3e3e-81a6-7316-4713196e9144" [ 646.059024] env[68673]: _type = "Task" [ 646.059024] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.072681] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.072991] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.073264] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.156311] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.267569] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433433, 'name': CreateVM_Task, 'duration_secs': 0.344256} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.270236] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 646.271403] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.272477] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.272477] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 646.272477] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d4505a-73b8-4494-8ac1-77c0149ceb12 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.277453] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 646.277453] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52203c61-7542-8996-96b5-5456bc121ca5" [ 646.277453] env[68673]: _type = "Task" [ 646.277453] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.287819] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52203c61-7542-8996-96b5-5456bc121ca5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.709788] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "dcb71746-662e-4ace-afcb-a997d236f12b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.710240] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.791804] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.792146] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.792364] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.438079] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Updating instance_info_cache with network_info: [{"id": "2e792a23-bdb5-4319-9335-4206b5d5b761", "address": "fa:16:3e:67:04:f0", "network": {"id": "cba4fa08-8fa4-4a91-9a98-c19df6d15030", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2042026522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a985518815e0473a8648d59a868dbfab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e792a23-bd", "ovs_interfaceid": 
"2e792a23-bdb5-4319-9335-4206b5d5b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.459568] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Releasing lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.460204] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance network_info: |[{"id": "2e792a23-bdb5-4319-9335-4206b5d5b761", "address": "fa:16:3e:67:04:f0", "network": {"id": "cba4fa08-8fa4-4a91-9a98-c19df6d15030", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2042026522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a985518815e0473a8648d59a868dbfab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e792a23-bd", "ovs_interfaceid": "2e792a23-bdb5-4319-9335-4206b5d5b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 647.461015] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:04:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e792a23-bdb5-4319-9335-4206b5d5b761', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 647.471825] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Creating folder: Project (a985518815e0473a8648d59a868dbfab). Parent ref: group-v685311. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 647.472221] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48b0f3a8-222d-4841-83b0-2aa529c5a57f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.487178] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Created folder: Project (a985518815e0473a8648d59a868dbfab) in parent group-v685311. [ 647.487546] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Creating folder: Instances. Parent ref: group-v685339. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 647.487860] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-168bafd4-7cf5-4d8d-a9f9-9716c5729de7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.498351] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Created folder: Instances in parent group-v685339. [ 647.498460] env[68673]: DEBUG oslo.service.loopingcall [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.498633] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 647.498835] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a07a8ae-3e84-45a0-89c5-611c4de02a64 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.520549] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.520549] env[68673]: value = "task-3433436" [ 647.520549] env[68673]: _type = "Task" [ 647.520549] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.530266] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433436, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.031342] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433436, 'name': CreateVM_Task, 'duration_secs': 0.328606} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.031590] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 648.032378] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.032450] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.032767] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 648.033060] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09057d6e-824a-43ca-9aac-0a1b996387c8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.038071] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for the task: (returnval){ [ 648.038071] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52433470-f445-d35e-de35-5d0985323d02" [ 648.038071] env[68673]: _type = "Task" [ 648.038071] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.054502] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52433470-f445-d35e-de35-5d0985323d02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.388682] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Received event network-changed-ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 648.388936] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Refreshing instance network info cache due to event network-changed-ac691c8e-e9d5-4cde-a6e6-e06129944a9d. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 648.389192] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.389327] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquired lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.389912] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Refreshing network info cache for port ac691c8e-e9d5-4cde-a6e6-e06129944a9d {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 648.551166] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.551166] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.551166] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.711460] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Updated VIF entry in instance network info cache for port ac691c8e-e9d5-4cde-a6e6-e06129944a9d. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 649.711460] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Updating instance_info_cache with network_info: [{"id": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "address": "fa:16:3e:b8:2a:d8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac691c8e-e9", "ovs_interfaceid": "ac691c8e-e9d5-4cde-a6e6-e06129944a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.723118] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Releasing lock "refresh_cache-377657cd-9913-49ec-a0f8-a701655ff68d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.723381] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Received event network-vif-plugged-e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 649.723576] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.723778] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.723939] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.724113] env[68673]: DEBUG nova.compute.manager 
[req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] No waiting events found dispatching network-vif-plugged-e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 649.724281] env[68673]: WARNING nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Received unexpected event network-vif-plugged-e38cde06-158b-4c6f-8fac-d7220cab1c2b for instance with vm_state building and task_state spawning. [ 649.724445] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Received event network-vif-plugged-21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 649.724605] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.724780] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.724939] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.725796] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] No waiting events found dispatching network-vif-plugged-21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 649.725879] env[68673]: WARNING nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Received unexpected event network-vif-plugged-21de1dd5-9c92-4d55-8f8c-805fc111448f for instance with vm_state building and task_state spawning. 
[ 649.726095] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Received event network-changed-e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 649.726322] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Refreshing instance network info cache due to event network-changed-e38cde06-158b-4c6f-8fac-d7220cab1c2b. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 649.726565] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.726873] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquired lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.727118] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Refreshing network info cache for port e38cde06-158b-4c6f-8fac-d7220cab1c2b {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 652.006461] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Updated VIF entry in instance network info cache for port e38cde06-158b-4c6f-8fac-d7220cab1c2b. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 652.006461] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Updating instance_info_cache with network_info: [{"id": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "address": "fa:16:3e:0e:01:f9", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38cde06-15", "ovs_interfaceid": "e38cde06-158b-4c6f-8fac-d7220cab1c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.022070] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Releasing lock "refresh_cache-837911fc-a8f2-41f9-bc0b-a3af4f29bd07" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.022368] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Received event network-changed-21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 652.022540] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Refreshing instance network info cache due to event network-changed-21de1dd5-9c92-4d55-8f8c-805fc111448f. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 652.022832] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.022993] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquired lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.023175] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Refreshing network info cache for port 21de1dd5-9c92-4d55-8f8c-805fc111448f {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 652.598148] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "adb818a9-e799-4f57-93f6-ee4e32104d61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.598332] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.025827] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9f496b55-4fee-459c-bddd-18da5214b14e tempest-ServersWithSpecificFlavorTestJSON-151809798 tempest-ServersWithSpecificFlavorTestJSON-151809798-project-member] Acquiring lock "7f2eb979-8932-4a35-a700-2fc40eb24310" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.026260] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9f496b55-4fee-459c-bddd-18da5214b14e tempest-ServersWithSpecificFlavorTestJSON-151809798 tempest-ServersWithSpecificFlavorTestJSON-151809798-project-member] Lock "7f2eb979-8932-4a35-a700-2fc40eb24310" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.077386] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Updated VIF entry in instance network info cache for port 21de1dd5-9c92-4d55-8f8c-805fc111448f. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 654.077386] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Updating instance_info_cache with network_info: [{"id": "21de1dd5-9c92-4d55-8f8c-805fc111448f", "address": "fa:16:3e:06:db:8b", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de1dd5-9c", "ovs_interfaceid": "21de1dd5-9c92-4d55-8f8c-805fc111448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.100238] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Releasing lock "refresh_cache-c09c33d3-ae8a-4057-9f7d-6a4b4948423d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.100238] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Received event network-vif-plugged-2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 654.100238] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.100238] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.100400] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.100437] env[68673]: DEBUG 
nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] No waiting events found dispatching network-vif-plugged-2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 654.100618] env[68673]: WARNING nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Received unexpected event network-vif-plugged-2e792a23-bdb5-4319-9335-4206b5d5b761 for instance with vm_state building and task_state spawning. [ 654.100791] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Received event network-changed-2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 654.101327] env[68673]: DEBUG nova.compute.manager [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Refreshing instance network info cache due to event network-changed-2e792a23-bdb5-4319-9335-4206b5d5b761. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 654.101700] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquiring lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.101853] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Acquired lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.102242] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Refreshing network info cache for port 2e792a23-bdb5-4319-9335-4206b5d5b761 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 656.103180] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Updated VIF entry in instance network info cache for port 2e792a23-bdb5-4319-9335-4206b5d5b761. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 656.103180] env[68673]: DEBUG nova.network.neutron [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Updating instance_info_cache with network_info: [{"id": "2e792a23-bdb5-4319-9335-4206b5d5b761", "address": "fa:16:3e:67:04:f0", "network": {"id": "cba4fa08-8fa4-4a91-9a98-c19df6d15030", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2042026522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a985518815e0473a8648d59a868dbfab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e792a23-bd", "ovs_interfaceid": "2e792a23-bdb5-4319-9335-4206b5d5b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.115191] env[68673]: DEBUG oslo_concurrency.lockutils [req-6cd9773e-e096-43e6-a684-d642a61f95ac req-e9a5eaa8-1404-4ffd-9ee3-b5088fa331e3 service nova] Releasing lock "refresh_cache-31700289-ac8c-47a9-b4e0-981b5c9df645" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.803805] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83345f96-9f0e-43f9-8c7b-d2078befeae5 tempest-ImagesOneServerNegativeTestJSON-2141694111 tempest-ImagesOneServerNegativeTestJSON-2141694111-project-member] Acquiring lock "630479ff-b2ea-4189-91b5-1f9420715526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.804093] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83345f96-9f0e-43f9-8c7b-d2078befeae5 tempest-ImagesOneServerNegativeTestJSON-2141694111 tempest-ImagesOneServerNegativeTestJSON-2141694111-project-member] Lock "630479ff-b2ea-4189-91b5-1f9420715526" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.146175] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7a8eded3-3c71-43ce-9443-836e020c9e69 tempest-ServerExternalEventsTest-1747200570 tempest-ServerExternalEventsTest-1747200570-project-member] Acquiring lock "1ac1e6de-b44b-4732-aa67-35d01ec42309" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.146482] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7a8eded3-3c71-43ce-9443-836e020c9e69 tempest-ServerExternalEventsTest-1747200570 
tempest-ServerExternalEventsTest-1747200570-project-member] Lock "1ac1e6de-b44b-4732-aa67-35d01ec42309" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.283882] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1f9021c0-81a0-45e0-a068-bf9d4fee2d58 tempest-ServersNegativeTestJSON-723567342 tempest-ServersNegativeTestJSON-723567342-project-member] Acquiring lock "45d008a0-2f5b-4477-b882-fb5039101c88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.285287] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1f9021c0-81a0-45e0-a068-bf9d4fee2d58 tempest-ServersNegativeTestJSON-723567342 tempest-ServersNegativeTestJSON-723567342-project-member] Lock "45d008a0-2f5b-4477-b882-fb5039101c88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.520397] env[68673]: DEBUG oslo_concurrency.lockutils [None req-64b76611-7407-4df0-85ac-7eb14c1bbe7d tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Acquiring lock "7dccdc28-d60a-4bf5-8a4b-1db09c68a48b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.522125] env[68673]: DEBUG oslo_concurrency.lockutils [None req-64b76611-7407-4df0-85ac-7eb14c1bbe7d tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Lock "7dccdc28-d60a-4bf5-8a4b-1db09c68a48b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.632790] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "99af213d-076b-411c-955a-5a03ff83602f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.632790] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "99af213d-076b-411c-955a-5a03ff83602f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.660823] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "3535ecb8-b183-4d13-b894-60bdd1dca229" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.660823] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "3535ecb8-b183-4d13-b894-60bdd1dca229" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.090426] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ceed0630-fce6-40c1-81f3-c59b3463b300 tempest-ServerActionsTestOtherB-436139619 tempest-ServerActionsTestOtherB-436139619-project-member] Acquiring lock "eeb2f3f4-2a86-499e-b400-c53120eb2067" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.091790] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ceed0630-fce6-40c1-81f3-c59b3463b300 tempest-ServerActionsTestOtherB-436139619 tempest-ServerActionsTestOtherB-436139619-project-member] Lock "eeb2f3f4-2a86-499e-b400-c53120eb2067" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.517470] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ab406369-deb8-48ff-a34e-e150f318fe83 tempest-ServersTestFqdnHostnames-1604400882 tempest-ServersTestFqdnHostnames-1604400882-project-member] Acquiring lock "9e870122-8ffa-48e8-bf17-3ec1c06a50fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.517696] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ab406369-deb8-48ff-a34e-e150f318fe83 tempest-ServersTestFqdnHostnames-1604400882 tempest-ServersTestFqdnHostnames-1604400882-project-member] Lock "9e870122-8ffa-48e8-bf17-3ec1c06a50fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.252132] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2d899b25-40f2-4cf7-9cf6-92ae6f3719a6 tempest-ServerAddressesNegativeTestJSON-104457049 tempest-ServerAddressesNegativeTestJSON-104457049-project-member] Acquiring lock "315775cb-2994-4099-ba4c-3cc6a15f8e0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.252808] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2d899b25-40f2-4cf7-9cf6-92ae6f3719a6 tempest-ServerAddressesNegativeTestJSON-104457049 tempest-ServerAddressesNegativeTestJSON-104457049-project-member] Lock "315775cb-2994-4099-ba4c-3cc6a15f8e0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.119539] env[68673]: DEBUG oslo_concurrency.lockutils [None req-fa96f40e-1b55-4b60-a49c-28bc2d6c5647 
tempest-FloatingIPsAssociationTestJSON-397346482 tempest-FloatingIPsAssociationTestJSON-397346482-project-member] Acquiring lock "ff20518b-b7b8-447c-96eb-5a0f85c4db9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.119771] env[68673]: DEBUG oslo_concurrency.lockutils [None req-fa96f40e-1b55-4b60-a49c-28bc2d6c5647 tempest-FloatingIPsAssociationTestJSON-397346482 tempest-FloatingIPsAssociationTestJSON-397346482-project-member] Lock "ff20518b-b7b8-447c-96eb-5a0f85c4db9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.236255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed0833da-9945-46ee-85cd-04685b2b6bc3 tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Acquiring lock "67354344-5870-4234-a9b2-33b330dfe55f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.236255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed0833da-9945-46ee-85cd-04685b2b6bc3 tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Lock "67354344-5870-4234-a9b2-33b330dfe55f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.714103] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b908b9-74bf-4aa4-88bf-4a3e4f28c28e tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Acquiring lock "ad920a00-482e-4f01-b357-573cc0bf7eca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.714391] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b908b9-74bf-4aa4-88bf-4a3e4f28c28e tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Lock "ad920a00-482e-4f01-b357-573cc0bf7eca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.072912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dabb2182-9014-4ddd-a854-5ffedc276fae tempest-ServerShowV257Test-1275903674 tempest-ServerShowV257Test-1275903674-project-member] Acquiring lock "189516d5-97b5-4339-b0d9-94d256e36c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.072912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dabb2182-9014-4ddd-a854-5ffedc276fae tempest-ServerShowV257Test-1275903674 tempest-ServerShowV257Test-1275903674-project-member] Lock "189516d5-97b5-4339-b0d9-94d256e36c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.604223] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5627b011-47ae-47ed-9355-27c7ccea17ca tempest-InstanceActionsTestJSON-1167780584 tempest-InstanceActionsTestJSON-1167780584-project-member] Acquiring lock "e9d6e27c-ba9a-45d5-ae1c-2558b44d9659" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.604533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5627b011-47ae-47ed-9355-27c7ccea17ca tempest-InstanceActionsTestJSON-1167780584 tempest-InstanceActionsTestJSON-1167780584-project-member] Lock "e9d6e27c-ba9a-45d5-ae1c-2558b44d9659" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.432647] env[68673]: WARNING oslo_vmware.rw_handles [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 677.432647] env[68673]: ERROR oslo_vmware.rw_handles [ 677.434314] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 677.439654] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 677.439654] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Copying Virtual Disk [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/aeaca262-49c1-4294-bd57-748b0c3423f5/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 677.439654] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3172391-1953-4479-8c9f-00069251e494 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.446465] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Waiting for the task: (returnval){ [ 677.446465] env[68673]: value = "task-3433437" [ 677.446465] env[68673]: _type = "Task" [ 677.446465] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.455426] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Task: {'id': task-3433437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.957767] env[68673]: DEBUG oslo_vmware.exceptions [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 677.958246] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.962132] env[68673]: ERROR nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 677.962132] env[68673]: Faults: ['InvalidArgument'] [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Traceback (most recent call last): [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] yield resources [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self.driver.spawn(context, instance, image_meta, [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self._fetch_image_if_missing(context, vi) [ 677.962132] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] image_cache(vi, tmp_image_ds_loc) [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] vm_util.copy_virtual_disk( [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] session._wait_for_task(vmdk_copy_task) [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return self.wait_for_task(task_ref) [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return evt.wait() [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] result = hub.switch() [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 677.962542] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return self.greenlet.switch() [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self.f(*self.args, **self.kw) [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] raise exceptions.translate_fault(task_info.error) [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Faults: ['InvalidArgument'] [ 677.962928] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] [ 677.962928] env[68673]: INFO nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Terminating instance [ 677.964801] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.965042] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 677.965765] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 
tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 677.965992] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 677.966491] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b55d105-b0e6-46a4-b02e-5d42ab4a733e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.969364] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d72903-b501-48b6-ae12-2b4f78efdd36 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.981385] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 677.981645] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 677.984534] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32849d53-dfc9-4949-8e00-cadcf2d83546 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.991266] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 677.991266] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26ea50a1-3816-47e0-9605-f85a21d51bc0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.996123] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Waiting for the task: (returnval){ [ 677.996123] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]523d6901-bab0-72de-aece-0a19fb4feb97" [ 677.996123] env[68673]: _type = "Task" [ 677.996123] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.007222] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 678.007473] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Creating directory with path [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 678.007694] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcfa1f63-b34b-4edd-9caa-4c8c6f303ed4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.028179] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Created directory with path [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 678.028388] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Fetch image to [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 678.028555] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 678.029354] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccee044e-39a0-4dbc-a32e-d4e5d7ea8aeb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.037792] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c4b223-29f2-4521-867f-6eb734bde1c7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.051292] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f09ebd-b40a-4503-ad90-408b95768666 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.059671] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 678.059833] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 678.059963] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Deleting the datastore file [datastore1] f74fc310-4045-448f-93f6-96196d5f38b2 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 678.087939] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56107ebd-4bcf-47f2-92ed-110fa6fd383d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.090757] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18a4496-bdd6-405d-8f62-c760f2774851 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.098231] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-954b4183-396d-4f14-b274-dd9d7ffeacd6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.101178] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Waiting for the task: (returnval){ [ 678.101178] env[68673]: value = "task-3433439" [ 678.101178] env[68673]: _type = "Task" [ 678.101178] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.109620] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Task: {'id': task-3433439, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.134787] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 678.211199] env[68673]: DEBUG oslo_vmware.rw_handles [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 678.274903] env[68673]: DEBUG oslo_vmware.rw_handles [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 678.275143] env[68673]: DEBUG oslo_vmware.rw_handles [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 678.611951] env[68673]: DEBUG oslo_vmware.api [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Task: {'id': task-3433439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075877} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.612301] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 678.612396] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 678.612563] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 678.612995] env[68673]: INFO nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Took 0.65 seconds to destroy the instance on the hypervisor. [ 678.616117] env[68673]: DEBUG nova.compute.claims [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 678.616117] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.616367] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.135647] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f917352a-2349-4911-880f-4743096f63f3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.145260] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12daedb0-5d48-4f77-9ea3-43ec3671e8cd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.178880] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d4b7d3-d85e-421f-965f-6b2241d37acc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.187057] env[68673]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f151448b-8b28-41b1-8d73-c6c4da8ead5f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.201091] env[68673]: DEBUG nova.compute.provider_tree [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.216905] env[68673]: DEBUG nova.scheduler.client.report [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 679.242667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.626s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.243266] env[68673]: ERROR nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 679.243266] env[68673]: Faults: ['InvalidArgument'] [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Traceback (most recent call last): [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self.driver.spawn(context, instance, image_meta, [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self._fetch_image_if_missing(context, vi) [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] image_cache(vi, tmp_image_ds_loc) [ 679.243266] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] vm_util.copy_virtual_disk( [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] session._wait_for_task(vmdk_copy_task) [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return self.wait_for_task(task_ref) [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return evt.wait() [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] result = hub.switch() [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] return self.greenlet.switch() [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 679.243701] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] self.f(*self.args, **self.kw) [ 679.244118] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 679.244118] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] raise exceptions.translate_fault(task_info.error) [ 679.244118] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 679.244118] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Faults: ['InvalidArgument'] [ 679.244118] env[68673]: ERROR nova.compute.manager [instance: f74fc310-4045-448f-93f6-96196d5f38b2] [ 679.244118] env[68673]: DEBUG nova.compute.utils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] VimFaultException {{(pid=68673) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 679.247678] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Build of instance f74fc310-4045-448f-93f6-96196d5f38b2 was re-scheduled: A specified parameter was not correct: fileType [ 679.247678] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 679.247799] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 679.247974] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 679.248147] env[68673]: DEBUG nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 679.248309] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.280675] env[68673]: DEBUG nova.network.neutron [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.291989] env[68673]: INFO nova.compute.manager [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: f74fc310-4045-448f-93f6-96196d5f38b2] Took 1.04 seconds to deallocate network for instance. 
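The records above trace one complete failure-and-retry cycle: CopyVirtualDisk_Task fails with an InvalidArgument fault on the fileType parameter, the compute manager destroys the half-built instance, aborts its resource claim, re-schedules the build, and finally deallocates its network. A minimal sketch of the error path visible in the traceback follows; it assumes only oslo.vmware's public wait_for_task / VimFaultException behavior shown in the log, and the spawn_disk_copy and reschedule_build callables are hypothetical stand-ins for the Nova internals, not Nova's actual code:

    # Sketch of the task-wait error path seen above (hypothetical helpers).
    from oslo_vmware import exceptions as vexc

    def copy_image_disk(session, spawn_disk_copy, reschedule_build):
        task_ref = spawn_disk_copy()  # starts e.g. CopyVirtualDisk_Task
        try:
            # Polls the vCenter task and raises once task_info.error is set,
            # as in the traceback (api.py _poll_task -> translate_fault).
            session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # The log's "Faults: ['InvalidArgument']" is this fault list.
            if 'InvalidArgument' in getattr(exc, 'fault_list', []):
                # Mirrors the re-schedule logged at 679.247: the build is
                # retried elsewhere instead of failing the request outright.
                reschedule_build(reason=str(exc))
                return
            raise

Under these assumptions the sketch only reproduces the control flow recorded in the log: the fault surfaces from the task poll, the claim and networking are cleaned up by the caller, and the build is handed back to the scheduler.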
[ 680.430678] env[68673]: INFO nova.scheduler.client.report [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Deleted allocations for instance f74fc310-4045-448f-93f6-96196d5f38b2 [ 680.464505] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba6375a1-f5df-46c9-bd59-76e4689ef0a6 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "f74fc310-4045-448f-93f6-96196d5f38b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.205s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.503378] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 680.588815] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.588815] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.589629] env[68673]: INFO nova.compute.claims [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.099079] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c517366-6121-4f6b-b10d-1e0c870f1289 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.108254] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050e30fc-c77f-4bdb-96e2-f74a457f4775 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.142414] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8fdadf-d34d-4ab9-ae13-1925540177ff {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.150035] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02c33d4-6b93-4505-9452-e4c62ddc4b0f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.164985] env[68673]: DEBUG nova.compute.provider_tree [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 
tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.180145] env[68673]: DEBUG nova.scheduler.client.report [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 681.207069] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.619s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.207600] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 681.254217] env[68673]: DEBUG nova.compute.utils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.255705] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 681.255877] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 681.268294] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Start building block device mappings for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 681.373071] env[68673]: DEBUG nova.policy [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a5159631aca4d74a20f06e430887c18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d21d7d062a441d9be569a8cbcfc1b11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 681.381069] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 681.417097] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.417369] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.417369] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.417586] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.417685] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.417882] env[68673]: 
DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.418193] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.418292] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.418464] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.418728] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.418811] env[68673]: DEBUG nova.virt.hardware [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.420134] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432506fd-7de4-4b25-b28a-519f1504a1cd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.430771] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e5d9c4-345c-4f89-8269-cde8e7e3459b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.994292] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Successfully created port: 17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.643455] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df7ceddc-a5ff-4352-990f-0dbea8a217d5 tempest-ServerDiagnosticsV248Test-1763732598 tempest-ServerDiagnosticsV248Test-1763732598-project-member] Acquiring lock "7708d377-ec43-47d9-ba3d-7bb1020415cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
682.643455] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df7ceddc-a5ff-4352-990f-0dbea8a217d5 tempest-ServerDiagnosticsV248Test-1763732598 tempest-ServerDiagnosticsV248Test-1763732598-project-member] Lock "7708d377-ec43-47d9-ba3d-7bb1020415cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.987068] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Successfully updated port: 17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.999401] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.999401] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.999401] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 683.062532] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.523352] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Updating instance_info_cache with network_info: [{"id": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "address": "fa:16:3e:a9:ee:c2", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17995d54-8a", "ovs_interfaceid": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.555229] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.556434] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance network_info: |[{"id": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "address": "fa:16:3e:a9:ee:c2", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17995d54-8a", "ovs_interfaceid": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 683.556605] env[68673]: DEBUG 
nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:ee:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17995d54-8a14-4317-b4b0-a82fdc47ba13', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.566118] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating folder: Project (6d21d7d062a441d9be569a8cbcfc1b11). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 683.567103] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a178f10-4631-437c-8612-64f652cca735 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.576811] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created folder: Project (6d21d7d062a441d9be569a8cbcfc1b11) in parent group-v685311. [ 683.576993] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating folder: Instances. Parent ref: group-v685342. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 683.577231] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0c15b87-6cfc-4d81-ae8a-cb026c3fa49d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.588158] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created folder: Instances in parent group-v685342. [ 683.588406] env[68673]: DEBUG oslo.service.loopingcall [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.588589] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 683.588790] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9509018d-2371-455f-81e6-79421109c59c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.612983] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.612983] env[68673]: value = "task-3433442" [ 683.612983] env[68673]: _type = "Task" [ 683.612983] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.621616] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433442, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.126913] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433442, 'name': CreateVM_Task, 'duration_secs': 0.298893} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.127788] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 684.129550] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.129719] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.130051] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 684.130302] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901f82f6-96bc-4097-ba41-b55409de4b6d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.135072] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 684.135072] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52159b19-265f-20c1-0b0b-527f65905f06" [ 684.135072] env[68673]: _type = "Task" [ 684.135072] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.143793] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52159b19-265f-20c1-0b0b-527f65905f06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.354316] env[68673]: DEBUG nova.compute.manager [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Received event network-vif-plugged-17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 684.354316] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Acquiring lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.354316] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.354316] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.354924] env[68673]: DEBUG nova.compute.manager [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] No waiting events found dispatching network-vif-plugged-17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 684.355297] env[68673]: WARNING nova.compute.manager [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Received unexpected event network-vif-plugged-17995d54-8a14-4317-b4b0-a82fdc47ba13 for instance with vm_state building and task_state spawning. [ 684.355934] env[68673]: DEBUG nova.compute.manager [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Received event network-changed-17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 684.356305] env[68673]: DEBUG nova.compute.manager [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Refreshing instance network info cache due to event network-changed-17995d54-8a14-4317-b4b0-a82fdc47ba13. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 684.356855] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Acquiring lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.359112] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Acquired lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.359112] env[68673]: DEBUG nova.network.neutron [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Refreshing network info cache for port 17995d54-8a14-4317-b4b0-a82fdc47ba13 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 684.645148] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.645404] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.645620] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.871615] env[68673]: DEBUG nova.network.neutron [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Updated VIF entry in instance network info cache for port 17995d54-8a14-4317-b4b0-a82fdc47ba13. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 684.875529] env[68673]: DEBUG nova.network.neutron [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Updating instance_info_cache with network_info: [{"id": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "address": "fa:16:3e:a9:ee:c2", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17995d54-8a", "ovs_interfaceid": "17995d54-8a14-4317-b4b0-a82fdc47ba13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.886690] env[68673]: DEBUG oslo_concurrency.lockutils [req-33e06608-b83f-4e13-befe-8b3bece77c6b req-97f40573-f99a-472f-ab38-13a15fd8a71f service nova] Releasing lock "refresh_cache-d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.137558] env[68673]: DEBUG oslo_concurrency.lockutils [None req-6877aa12-0f90-4e05-9e6f-6027e005aec3 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Acquiring lock "2148d351-6632-4979-9a3b-c79290d7e39e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.137832] env[68673]: DEBUG oslo_concurrency.lockutils [None req-6877aa12-0f90-4e05-9e6f-6027e005aec3 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "2148d351-6632-4979-9a3b-c79290d7e39e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.047947] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "50cc30fa-01f0-441d-af41-76c5273123af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.047947] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] 
Lock "50cc30fa-01f0-441d-af41-76c5273123af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.874173] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e5db8f92-35ea-4274-aacc-b554471cd536 tempest-ServersTestManualDisk-612812194 tempest-ServersTestManualDisk-612812194-project-member] Acquiring lock "ede1744b-c382-4b37-8884-11b73093f632" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.874494] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e5db8f92-35ea-4274-aacc-b554471cd536 tempest-ServersTestManualDisk-612812194 tempest-ServersTestManualDisk-612812194-project-member] Lock "ede1744b-c382-4b37-8884-11b73093f632" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.289411] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.289685] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.314723] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.314723] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.330893] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.331237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.331438] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.331596] env[68673]: DEBUG nova.compute.resource_tracker [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 698.332795] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50a3d7d-2a9e-4ffe-8a6e-e935655ec905 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.342777] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa0d6d9-166e-477f-9a5c-dd713cf283da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.357830] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed4050f-f243-4fe3-a590-095bf09cbd98 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.366720] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c621a35-d313-49eb-ab1b-6d6143a1a4ca {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.396963] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180904MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 698.396963] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.396963] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.472183] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d7ad9f48-d538-4bc4-b911-6e564cd5f457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.472360] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfd5ac40-9e14-473a-8f14-895534a4642e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.472489] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7fd1f100-addc-4319-acf8-13f19a4f7b3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.472611] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.472786] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 02517d31-0830-4e75-bde3-5f2e939f1328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.472931] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.473068] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.473188] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.473303] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.473415] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.500302] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.510740] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.522686] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.532446] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7f2eb979-8932-4a35-a700-2fc40eb24310 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.546164] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 630479ff-b2ea-4189-91b5-1f9420715526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.554851] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 1ac1e6de-b44b-4732-aa67-35d01ec42309 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.566672] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 45d008a0-2f5b-4477-b882-fb5039101c88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.576439] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7dccdc28-d60a-4bf5-8a4b-1db09c68a48b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.599717] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 99af213d-076b-411c-955a-5a03ff83602f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.610598] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3535ecb8-b183-4d13-b894-60bdd1dca229 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.620589] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance eeb2f3f4-2a86-499e-b400-c53120eb2067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.630990] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 9e870122-8ffa-48e8-bf17-3ec1c06a50fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.643327] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 315775cb-2994-4099-ba4c-3cc6a15f8e0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.652967] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ff20518b-b7b8-447c-96eb-5a0f85c4db9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.663213] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 67354344-5870-4234-a9b2-33b330dfe55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.672846] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ad920a00-482e-4f01-b357-573cc0bf7eca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.682423] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 189516d5-97b5-4339-b0d9-94d256e36c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.694909] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance e9d6e27c-ba9a-45d5-ae1c-2558b44d9659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.704441] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7708d377-ec43-47d9-ba3d-7bb1020415cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.714298] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2148d351-6632-4979-9a3b-c79290d7e39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.724036] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 50cc30fa-01f0-441d-af41-76c5273123af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.738441] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ede1744b-c382-4b37-8884-11b73093f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.739712] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 698.739712] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
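The "Final resource view" entry above can be cross-checked against the rest of this log: the tracker listed ten instances, each holding {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, and the MEMORY_MB inventory reported a few entries below carries 'reserved': 512. The reported used_ram=1792MB only adds up if that 512 MB host reservation is counted as used, and it does; a quick check using only figures that appear in this log:

    # Worked check of the resource tracker's final view, figures from the log.
    instances = 10               # "total allocated vcpus: 10"
    ram_per_instance_mb = 128    # per-instance MEMORY_MB allocation
    disk_per_instance_gb = 1     # per-instance DISK_GB allocation
    reserved_host_ram_mb = 512   # 'reserved' in the MEMORY_MB inventory below

    assert instances * ram_per_instance_mb + reserved_host_ram_mb == 1792  # used_ram=1792MB
    assert instances * disk_per_instance_gb == 10                          # used_disk=10GB
    assert instances == 10                                                 # used_vcpus=10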
[ 699.160541] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef803c6-bd44-408a-b043-2e4a3f3ea881 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.168210] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462ebbc8-3c99-4a2e-a2f0-2df9056b470b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.199123] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c21db2f-5f73-4970-b45f-bec9edeb688b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.206588] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dcd553-c076-47cc-8ffb-e67209fc238c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.219887] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.230673] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 699.261138] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 699.261424] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.864s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.732177] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.732395] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 699.732481] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 699.766196] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766383] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766518] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766644] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766784] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766879] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.766999] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.770032] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.770217] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building.
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.770310] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 699.770431] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 699.770989] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.771181] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.771338] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.771502] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 699.783934] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.783934] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 725.693054] env[68673]: WARNING oslo_vmware.rw_handles [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 725.693054] env[68673]: ERROR oslo_vmware.rw_handles [ 725.693054] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 725.694934] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 725.697166] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Copying Virtual Disk [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/c7dec917-0dc1-4c4e-8dbc-076e72523472/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 725.697166] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b8589d6-996c-4648-a823-a4ddc056cb16 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.704407] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Waiting for the task: (returnval){ [ 725.704407] env[68673]: value = "task-3433443" [ 725.704407] env[68673]: _type = "Task" [ 725.704407] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.717358] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Task: {'id': task-3433443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.215082] env[68673]: DEBUG oslo_vmware.exceptions [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 726.215379] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.215942] env[68673]: ERROR nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 726.215942] env[68673]: Faults: ['InvalidArgument'] [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Traceback (most recent call last): [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] yield resources [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self.driver.spawn(context, instance, image_meta, [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self._fetch_image_if_missing(context, vi) [ 726.215942] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] image_cache(vi, tmp_image_ds_loc) [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] vm_util.copy_virtual_disk( [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] session._wait_for_task(vmdk_copy_task) [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return self.wait_for_task(task_ref) [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return evt.wait() [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] result = hub.switch() [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 726.216399] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return self.greenlet.switch() [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self.f(*self.args, **self.kw) [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] raise exceptions.translate_fault(task_info.error) [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Faults: ['InvalidArgument'] [ 726.216870] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] [ 726.216870] env[68673]: INFO nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Terminating instance [ 726.217789] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.217989] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.218601] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 
tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 726.218776] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 726.218991] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73601760-df0e-441f-9348-be5f1b35dbe6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.221198] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7db67b-87e0-4c86-9c2d-83a6cfb5ef7e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.227924] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 726.228132] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5c2fb63-27cd-4ae9-88a3-2d9d6081800c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.230228] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.230399] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 726.231427] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46985c33-93eb-412d-9c16-1df413eaddfb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.236389] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 726.236389] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527f0c39-270f-92c9-7bed-01d94cc2df5f" [ 726.236389] env[68673]: _type = "Task" [ 726.236389] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.244353] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527f0c39-270f-92c9-7bed-01d94cc2df5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.413471] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 726.415970] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 726.415970] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Deleting the datastore file [datastore1] 7fd1f100-addc-4319-acf8-13f19a4f7b3c {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.415970] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b0eef49-8828-4b41-ba85-8eb75d42acee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.420635] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Waiting for the task: (returnval){ [ 726.420635] env[68673]: value = "task-3433445" [ 726.420635] env[68673]: _type = "Task" [ 726.420635] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.428585] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Task: {'id': task-3433445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.746581] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 726.746869] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.747107] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3af63bd6-4670-417f-ae17-85630fb8b058 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.758558] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.758763] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Fetch image to [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 726.758934] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 726.759683] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479898c0-236a-45cd-9851-e88b9a518acc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.766545] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdfda85-e781-485b-905e-0055f72f99b3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.775597] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32228815-783c-4679-a97b-4794d3a052ba {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.807263] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ba9281-2b4d-4e58-b48c-31fdf335c1ed {{(pid=68673) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.813591] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7a185c71-f576-4dca-8f4c-83d7d4ed4004 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.901116] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 726.935116] env[68673]: DEBUG oslo_vmware.api [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Task: {'id': task-3433445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081489} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.935116] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.935116] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 726.935456] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 726.935456] env[68673]: INFO nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Took 0.72 seconds to destroy the instance on the hypervisor. 
[ 726.937882] env[68673]: DEBUG nova.compute.claims [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 726.937882] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.937882] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.965975] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 727.028026] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 727.028026] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 727.385780] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11461979-7fb4-46d6-b2fd-223bab57ffe5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.394466] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f54a8ea-949a-4a3b-a3f2-8156a655e91f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.424953] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf659f0-d0d5-4aa7-ae7e-a26ca85e2c27 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.432103] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc197c-15f2-44ac-8ef5-28f48c3ee0c6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.446351] env[68673]: DEBUG nova.compute.provider_tree [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.455058] env[68673]: DEBUG nova.scheduler.client.report [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 727.483642] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.484317] env[68673]: ERROR nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 727.484317] env[68673]: Faults: ['InvalidArgument'] [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Traceback (most recent call last): [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self.driver.spawn(context, instance, image_meta, [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self._fetch_image_if_missing(context, vi) [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] image_cache(vi, tmp_image_ds_loc) [ 727.484317] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] vm_util.copy_virtual_disk( [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] session._wait_for_task(vmdk_copy_task) [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return self.wait_for_task(task_ref) [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return evt.wait() [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] result = hub.switch() [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] return self.greenlet.switch() [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 727.484770] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] self.f(*self.args, **self.kw) [ 727.485189] env[68673]: ERROR nova.compute.manager [instance: 
7fd1f100-addc-4319-acf8-13f19a4f7b3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 727.485189] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] raise exceptions.translate_fault(task_info.error) [ 727.485189] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 727.485189] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Faults: ['InvalidArgument'] [ 727.485189] env[68673]: ERROR nova.compute.manager [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] [ 727.485917] env[68673]: DEBUG nova.compute.utils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.487467] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Build of instance 7fd1f100-addc-4319-acf8-13f19a4f7b3c was re-scheduled: A specified parameter was not correct: fileType [ 727.487467] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 727.487886] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 727.488117] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 727.488317] env[68673]: DEBUG nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 727.488518] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 727.984074] env[68673]: DEBUG nova.network.neutron [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.022803] env[68673]: INFO nova.compute.manager [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] [instance: 7fd1f100-addc-4319-acf8-13f19a4f7b3c] Took 0.53 seconds to deallocate network for instance. [ 728.144281] env[68673]: INFO nova.scheduler.client.report [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Deleted allocations for instance 7fd1f100-addc-4319-acf8-13f19a4f7b3c [ 728.164851] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4173ce85-4c3e-478d-b938-3030a729894b tempest-ServerDiagnosticsNegativeTest-1516373053 tempest-ServerDiagnosticsNegativeTest-1516373053-project-member] Lock "7fd1f100-addc-4319-acf8-13f19a4f7b3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 102.615s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.178028] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Starting instance...
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 728.232061] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.232061] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.232061] env[68673]: INFO nova.compute.claims [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.639228] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a09577-66b7-408a-a154-25f016233ac3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.646923] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668f4e8b-e3db-4123-884b-0df50814b443 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.678997] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b7a33-b901-44f4-ad1f-54324bc2a220 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.686508] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de58d23-2468-49af-9268-4c9fdfa9034d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.699307] env[68673]: DEBUG nova.compute.provider_tree [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.709639] env[68673]: DEBUG nova.scheduler.client.report [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 728.723596] 
env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.494s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.724113] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 728.757861] env[68673]: DEBUG nova.compute.utils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.759480] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 728.760259] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 728.768721] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 728.836500] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 728.853890] env[68673]: DEBUG nova.policy [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7191f1c3c95e4ffcb778a01a73538fcf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31c27f0d8514130b99ce04dbe31c068', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 728.864807] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.865015] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.865184] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.865371] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.865526] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.865665] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 728.865876] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.866036] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.866203] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.866365] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.866534] env[68673]: DEBUG nova.virt.hardware [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.867676] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc404e10-fe60-41d6-be53-17aad8dde2ba {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.875518] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e9e393-8976-4ade-856e-f7bd6441e3a1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.473818] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Successfully created port: 39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.310318] env[68673]: DEBUG nova.compute.manager [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Received event network-vif-plugged-39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 730.310544] env[68673]: DEBUG oslo_concurrency.lockutils [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] Acquiring lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.310747] env[68673]: DEBUG oslo_concurrency.lockutils [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.310908] env[68673]: DEBUG oslo_concurrency.lockutils [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.312351] env[68673]: DEBUG nova.compute.manager [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] No waiting events found dispatching network-vif-plugged-39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 730.313180] env[68673]: WARNING nova.compute.manager [req-58660cc3-6006-44db-83db-c8bf1ceebcfd req-45a6a9a3-06e4-4deb-9502-72f14f96c6f5 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Received unexpected event network-vif-plugged-39438d14-7869-4079-9c5d-52d22a019ab1 for instance with vm_state building and task_state spawning. [ 730.420969] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Successfully updated port: 39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.487169] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.487458] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquired lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.487500] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.541779] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance cache missing network info.
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.777845] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Updating instance_info_cache with network_info: [{"id": "39438d14-7869-4079-9c5d-52d22a019ab1", "address": "fa:16:3e:65:af:a8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39438d14-78", "ovs_interfaceid": "39438d14-7869-4079-9c5d-52d22a019ab1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.804860] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Releasing lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.804860] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance network_info: |[{"id": "39438d14-7869-4079-9c5d-52d22a019ab1", "address": "fa:16:3e:65:af:a8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39438d14-78", "ovs_interfaceid": "39438d14-7869-4079-9c5d-52d22a019ab1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.805106] 
env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:af:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39438d14-7869-4079-9c5d-52d22a019ab1', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.812266] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Creating folder: Project (c31c27f0d8514130b99ce04dbe31c068). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.812817] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84465670-bdfc-48d0-9d28-6cb5d3e762f5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.824469] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Created folder: Project (c31c27f0d8514130b99ce04dbe31c068) in parent group-v685311. [ 730.824656] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Creating folder: Instances. Parent ref: group-v685345. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.824875] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26c09c11-945f-4e32-b2dd-a8f2a480b618 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.836140] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Created folder: Instances in parent group-v685345. [ 730.836140] env[68673]: DEBUG oslo.service.loopingcall [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.836140] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.836140] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1af7a509-f95f-421d-9fa3-e62b9ec1f28e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.854493] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.854493] env[68673]: value = "task-3433448" [ 730.854493] env[68673]: _type = "Task" [ 730.854493] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.864338] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433448, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.364615] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433448, 'name': CreateVM_Task, 'duration_secs': 0.489263} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.364907] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 731.366994] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.366994] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.366994] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 731.366994] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23248e53-b55f-4393-995e-9d23b8e5ba4e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.372565] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for the task: (returnval){ [ 731.372565] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52240475-110b-51e0-569e-7ec965105505" [ 731.372565] env[68673]: _type = "Task" [ 731.372565] env[68673]: } to 
complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.378085] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52240475-110b-51e0-569e-7ec965105505, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.518699] env[68673]: DEBUG oslo_concurrency.lockutils [None req-451309f2-c3cf-4a9b-aa50-2c018190fce0 tempest-ServersAdmin275Test-1470588584 tempest-ServersAdmin275Test-1470588584-project-member] Acquiring lock "4c074098-9e15-495b-854a-109c8c5d9657" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.519085] env[68673]: DEBUG oslo_concurrency.lockutils [None req-451309f2-c3cf-4a9b-aa50-2c018190fce0 tempest-ServersAdmin275Test-1470588584 tempest-ServersAdmin275Test-1470588584-project-member] Lock "4c074098-9e15-495b-854a-109c8c5d9657" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.880755] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.881481] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.881481] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.492948] env[68673]: DEBUG nova.compute.manager [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Received event network-changed-39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 732.493016] env[68673]: DEBUG nova.compute.manager [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Refreshing instance network info cache due to event network-changed-39438d14-7869-4079-9c5d-52d22a019ab1.
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 732.493181] env[68673]: DEBUG oslo_concurrency.lockutils [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] Acquiring lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.493365] env[68673]: DEBUG oslo_concurrency.lockutils [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] Acquired lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.493533] env[68673]: DEBUG nova.network.neutron [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Refreshing network info cache for port 39438d14-7869-4079-9c5d-52d22a019ab1 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 732.905978] env[68673]: DEBUG nova.network.neutron [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Updated VIF entry in instance network info cache for port 39438d14-7869-4079-9c5d-52d22a019ab1. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 732.911821] env[68673]: DEBUG nova.network.neutron [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Updating instance_info_cache with network_info: [{"id": "39438d14-7869-4079-9c5d-52d22a019ab1", "address": "fa:16:3e:65:af:a8", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39438d14-78", "ovs_interfaceid": "39438d14-7869-4079-9c5d-52d22a019ab1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.920117] env[68673]: DEBUG oslo_concurrency.lockutils [req-249d339c-75b6-4033-b423-fb7330dd8e62 req-a19e9dfe-d696-498b-a3db-16beae392779 service nova] Releasing lock "refresh_cache-579c535d-7061-4822-8f7f-50b36ddfd44b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.779658] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783317] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783525] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.795949] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.796197] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.796370] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.796526] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 757.797634] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8e504f-378e-49f3-90c7-fbcc20709f9f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.806527] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3337c52d-e017-4f8e-9e39-c6467ff61ed6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.820414] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dd8701-a9bb-4716-b12c-8c540a420de2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.826883] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ccdfb0-21b3-478a-bda0-9af5a7914490 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.857130] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180910MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 757.857258] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.857450] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.932233] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d7ad9f48-d538-4bc4-b911-6e564cd5f457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.932447] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfd5ac40-9e14-473a-8f14-895534a4642e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.932605] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.932752] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 02517d31-0830-4e75-bde3-5f2e939f1328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.932880] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.932999] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.933131] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.933246] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.933355] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.933467] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 757.946811] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.958525] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.969804] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7f2eb979-8932-4a35-a700-2fc40eb24310 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.980726] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 630479ff-b2ea-4189-91b5-1f9420715526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.993449] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 1ac1e6de-b44b-4732-aa67-35d01ec42309 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.005329] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 45d008a0-2f5b-4477-b882-fb5039101c88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.016032] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7dccdc28-d60a-4bf5-8a4b-1db09c68a48b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.026885] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 99af213d-076b-411c-955a-5a03ff83602f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.037225] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3535ecb8-b183-4d13-b894-60bdd1dca229 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.047486] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance eeb2f3f4-2a86-499e-b400-c53120eb2067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.057971] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 9e870122-8ffa-48e8-bf17-3ec1c06a50fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.070211] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 315775cb-2994-4099-ba4c-3cc6a15f8e0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.078988] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ff20518b-b7b8-447c-96eb-5a0f85c4db9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.088839] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 67354344-5870-4234-a9b2-33b330dfe55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.100491] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ad920a00-482e-4f01-b357-573cc0bf7eca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.112525] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 189516d5-97b5-4339-b0d9-94d256e36c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.123009] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance e9d6e27c-ba9a-45d5-ae1c-2558b44d9659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.134135] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7708d377-ec43-47d9-ba3d-7bb1020415cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.144597] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2148d351-6632-4979-9a3b-c79290d7e39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.154539] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 50cc30fa-01f0-441d-af41-76c5273123af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.165751] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ede1744b-c382-4b37-8884-11b73093f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.176463] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4c074098-9e15-495b-854a-109c8c5d9657 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.176652] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 758.176745] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 758.556502] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06248771-fd90-477a-9ef9-963cf4ca34da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.563841] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49699a6-9172-4d54-86f6-22804cfb21c4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.593565] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4edfe0-fbf1-4eb9-97e4-0dc7203e9e74 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.600520] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb02ac4-9bee-47cd-bb14-fb4a5f94a366 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.613333] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.621980] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 758.636694] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 758.636883] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.779s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.637621] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.637900] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.637996] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 760.784393] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.784746] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 760.784746] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 760.804827] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.804989] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805282] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805420] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805549] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805672] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805791] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.805909] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.806049] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.806155] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.806274] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 760.806749] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.783708] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.783940] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.210072] env[68673]: WARNING oslo_vmware.rw_handles [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 776.210072] env[68673]: ERROR oslo_vmware.rw_handles [ 776.210649] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 776.212389] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 776.212658] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Copying Virtual Disk [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/50dc8fc6-35a9-440d-8bc1-63be912ad6c7/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 776.212975] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a58427c-a2e1-491e-bc4c-e6c37f19bcaa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.221422] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 776.221422] env[68673]: value = "task-3433449" [ 776.221422] env[68673]: _type = "Task" [ 776.221422] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.229677] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.731613] env[68673]: DEBUG oslo_vmware.exceptions [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 776.731905] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.732997] env[68673]: ERROR nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 776.732997] env[68673]: Faults: ['InvalidArgument'] [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Traceback (most recent call last): [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] yield resources [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self.driver.spawn(context, instance, image_meta, [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self._fetch_image_if_missing(context, vi) [ 776.732997] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] image_cache(vi, tmp_image_ds_loc) [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] vm_util.copy_virtual_disk( [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] session._wait_for_task(vmdk_copy_task) [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return self.wait_for_task(task_ref) [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return evt.wait() [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] result = hub.switch() [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 776.733394] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return self.greenlet.switch() [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self.f(*self.args, **self.kw) [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] raise exceptions.translate_fault(task_info.error) [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Faults: ['InvalidArgument'] [ 776.733805] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] [ 776.733805] env[68673]: INFO nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Terminating instance [ 776.734349] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.734578] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.734818] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9acc7fe-72a1-4c6a-af55-0483805af029 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.737088] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 776.737287] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 776.738104] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d20a7f3-a20e-42fe-84e5-1e1a6f4ef83c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.744721] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 776.744871] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b37a6b67-bacb-43f6-b7f9-e8e72c6dd896 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.747079] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.747257] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 776.749050] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75376198-f24e-4df0-b192-8ec25990f80f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.752702] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for the task: (returnval){ [ 776.752702] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520a1e10-a178-7252-6d4e-91e53a78cd19" [ 776.752702] env[68673]: _type = "Task" [ 776.752702] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.760304] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520a1e10-a178-7252-6d4e-91e53a78cd19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.810219] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 776.810463] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 776.810643] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleting the datastore file [datastore1] d7ad9f48-d538-4bc4-b911-6e564cd5f457 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.810907] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-722632fd-0c2f-414d-b81b-a7f50a889b7b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.816831] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 776.816831] env[68673]: value = "task-3433451" [ 776.816831] env[68673]: _type = "Task" [ 776.816831] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.824656] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433451, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.263410] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 777.263745] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Creating directory with path [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.263881] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2448b6f2-10f3-4cee-a4c5-281efb406d93 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.276420] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Created directory with path [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.276598] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Fetch image to [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 777.276763] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 777.277452] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec62156-0e0c-4b2d-9c78-7cda13aa143d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.283614] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b630da4b-c0cc-4f6f-a716-64b0fb3744ed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.292178] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eca6fd-5141-4505-ad18-3a5a37863771 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.325548] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5f6d05-8ff1-4253-949d-c6452da3fd99 {{(pid=68673) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.332241] env[68673]: DEBUG oslo_vmware.api [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073275} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.333667] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 777.333859] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 777.334039] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 777.334217] env[68673]: INFO nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 777.335959] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-73e65768-a1b6-4c78-920b-fb215ca49b56 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.337806] env[68673]: DEBUG nova.compute.claims [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 777.337977] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.338194] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.360273] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 777.427674] env[68673]: DEBUG oslo_vmware.rw_handles [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 777.491219] env[68673]: DEBUG oslo_vmware.rw_handles [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 777.491468] env[68673]: DEBUG oslo_vmware.rw_handles [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 777.812199] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8d8e97-92b1-4187-9fde-2aebd2fcd191 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.819675] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0726f5-2a45-4a1b-880d-461dcadea3f5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.849423] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aff721e-010c-446c-b2d4-3f7f58a19f0e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.856649] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0d632f-44e6-4697-bf24-09411b5b3005 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.870168] env[68673]: DEBUG nova.compute.provider_tree [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.880792] env[68673]: DEBUG nova.scheduler.client.report [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 777.895018] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.557s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.895834] env[68673]: ERROR nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 777.895834] env[68673]: Faults: ['InvalidArgument'] [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Traceback (most recent call last): [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 777.895834] env[68673]: ERROR 
nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self.driver.spawn(context, instance, image_meta, [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self._fetch_image_if_missing(context, vi) [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] image_cache(vi, tmp_image_ds_loc) [ 777.895834] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] vm_util.copy_virtual_disk( [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] session._wait_for_task(vmdk_copy_task) [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return self.wait_for_task(task_ref) [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return evt.wait() [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] result = hub.switch() [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] return self.greenlet.switch() [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 777.896164] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] self.f(*self.args, **self.kw) [ 777.896515] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 777.896515] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] raise exceptions.translate_fault(task_info.error) [ 777.896515] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 777.896515] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Faults: ['InvalidArgument'] [ 777.896515] env[68673]: ERROR nova.compute.manager [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] [ 777.896756] env[68673]: DEBUG nova.compute.utils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 777.899539] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Build of instance d7ad9f48-d538-4bc4-b911-6e564cd5f457 was re-scheduled: A specified parameter was not correct: fileType [ 777.899539] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 777.900033] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 777.900252] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 777.900465] env[68673]: DEBUG nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 777.900657] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 778.316175] env[68673]: DEBUG nova.network.neutron [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.328037] env[68673]: INFO nova.compute.manager [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: d7ad9f48-d538-4bc4-b911-6e564cd5f457] Took 0.43 seconds to deallocate network for instance. [ 778.439446] env[68673]: INFO nova.scheduler.client.report [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted allocations for instance d7ad9f48-d538-4bc4-b911-6e564cd5f457 [ 778.468259] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7e44bd5e-6e12-45af-a303-cf53bfbca7de tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "d7ad9f48-d538-4bc4-b911-6e564cd5f457" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.633s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.501789] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 778.570456] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.570456] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.571026] env[68673]: INFO nova.compute.claims [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.986849] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbeecd0-be30-4cc3-8555-1f28f34bca46 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.994580] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43601a9-e34c-497a-8162-b582bcf2c386 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.024079] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5641426-8cb5-4759-90a9-1d5c5c216f93 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.031397] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c85ae5-e149-4747-84e1-1f4eee395a9f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.045448] env[68673]: DEBUG nova.compute.provider_tree [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.057033] env[68673]: DEBUG nova.scheduler.client.report [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 779.075039] env[68673]: DEBUG oslo_concurrency.lockutils 
[None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.505s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.075546] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 779.108167] env[68673]: DEBUG nova.compute.utils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.109425] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 779.109628] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 779.119651] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 779.190960] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Start spawning the instance on the hypervisor. 
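The inventory the report client logged above turns into schedulable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A quick check of the figures:

    # Placement-style capacity math for the logged inventory:
    # capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 98},
    }
    for rc, inv in inventory.items():
        cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(cap), 'max per allocation:', inv['max_unit'])
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400
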
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 779.208601] env[68673]: DEBUG nova.policy [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a5159631aca4d74a20f06e430887c18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d21d7d062a441d9be569a8cbcfc1b11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 779.230176] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.230176] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.230176] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.230329] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.230329] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.230329] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.230813] 
env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.231138] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 779.232808] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.233299] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.233689] env[68673]: DEBUG nova.virt.hardware [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.235078] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a79cb1-708d-441b-bb02-67b19ca18942 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.244487] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f067d660-1c52-4e2f-8630-91db2ef454f9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.662074] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Successfully created port: 9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.212163] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.212163] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
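The hardware.py lines above describe a brute-force search: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, and the only triple whose product equals 1 vCPU is 1:1:1. A compact sketch of that enumeration (illustrative, assuming only the product rule visible in the log):

    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count, within the given limits; 1 vCPU yields only (1, 1, 1).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]
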
{{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.775105] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Successfully updated port: 9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.790175] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.790328] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.792066] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 780.841150] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.861366] env[68673]: DEBUG nova.compute.manager [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Received event network-vif-plugged-9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 780.861587] env[68673]: DEBUG oslo_concurrency.lockutils [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] Acquiring lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.861790] env[68673]: DEBUG oslo_concurrency.lockutils [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] Lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.861973] env[68673]: DEBUG oslo_concurrency.lockutils [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] Lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.862271] env[68673]: DEBUG nova.compute.manager [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] No waiting events found dispatching network-vif-plugged-9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 780.862450] env[68673]: WARNING nova.compute.manager [req-c1d36d19-709d-4043-956f-284740a31bb1 req-d5e02b14-4c01-4bc9-9dcc-2d8cf382f81c service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Received unexpected event network-vif-plugged-9e9dc1f2-45cc-4e70-9b32-f729e8102431 for instance with vm_state building and task_state spawning. 
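The "No waiting events found" / "Received unexpected event" pair above means Neutron's network-vif-plugged callback arrived before anything had registered a waiter for it, which is harmless while the instance is still spawning. A minimal sketch of that dispatch pattern, using a plain dict of threading.Event objects as a stand-in for Nova's event machinery:

    import threading

    _waiters = {}
    _lock = threading.Lock()

    def prepare(key):
        # a spawning thread registers interest before the event can arrive
        with _lock:
            return _waiters.setdefault(key, threading.Event())

    def dispatch(key):
        # the external-event handler pops the waiter; if none exists yet,
        # the event is reported as unexpected, as in the WARNING above
        with _lock:
            waiter = _waiters.pop(key, None)
        if waiter is None:
            print('Received unexpected event', key)
        else:
            waiter.set()

    dispatch(('dcb71746-662e-4ace-afcb-a997d236f12b',
              'network-vif-plugged-9e9dc1f2-45cc-4e70-9b32-f729e8102431'))
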
[ 781.076975] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Updating instance_info_cache with network_info: [{"id": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "address": "fa:16:3e:21:77:67", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9dc1f2-45", "ovs_interfaceid": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.089219] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.089507] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance network_info: |[{"id": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "address": "fa:16:3e:21:77:67", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9dc1f2-45", "ovs_interfaceid": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 781.089896] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 
tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:77:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e9dc1f2-45cc-4e70-9b32-f729e8102431', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.097983] env[68673]: DEBUG oslo.service.loopingcall [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.097983] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 781.098216] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4ee42a9-f2a8-4ce4-9c5c-fb68c81944f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.118929] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.118929] env[68673]: value = "task-3433452" [ 781.118929] env[68673]: _type = "Task" [ 781.118929] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.126719] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433452, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.630292] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433452, 'name': CreateVM_Task, 'duration_secs': 0.283047} completed successfully. 
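CreateVM_Task returns immediately with a task handle; the "progress is 0%" and "completed successfully" lines come from a loop that re-reads the task state on an interval. A self-contained sketch of such a polling loop (poll() is a stand-in for the real PropertyCollector read of Task.info):

    import time

    def wait_for_task(poll, interval=0.5):
        # mirrors the poll-log-repeat cycle seen in the log above
        while True:
            state, progress = poll()
            print('progress is %d%%' % progress)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    states = iter([('running', 0), ('success', 100)])
    wait_for_task(lambda: next(states), interval=0.01)
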
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.630616] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 781.631346] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.631629] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.632065] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 781.632417] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddeff688-1d65-4a89-a359-fd48005cc795 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.637539] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 781.637539] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527bb0a6-345a-cf5e-6a4a-54b53b73bc0f" [ 781.637539] env[68673]: _type = "Task" [ 781.637539] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.645164] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527bb0a6-345a-cf5e-6a4a-54b53b73bc0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.148745] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.149034] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.149238] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.915597] env[68673]: DEBUG nova.compute.manager [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Received event network-changed-9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 782.915843] env[68673]: DEBUG nova.compute.manager [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Refreshing instance network info cache due to event network-changed-9e9dc1f2-45cc-4e70-9b32-f729e8102431. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 782.916084] env[68673]: DEBUG oslo_concurrency.lockutils [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] Acquiring lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.916237] env[68673]: DEBUG oslo_concurrency.lockutils [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] Acquired lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.916398] env[68673]: DEBUG nova.network.neutron [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Refreshing network info cache for port 9e9dc1f2-45cc-4e70-9b32-f729e8102431 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 783.252041] env[68673]: DEBUG nova.network.neutron [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Updated VIF entry in instance network info cache for port 9e9dc1f2-45cc-4e70-9b32-f729e8102431. 
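The lock names in this stretch double as datastore paths: the image cache lives at devstack-image-cache_base/<image-id>/<image-id>.vmdk on the datastore, and both the cache directory and the per-image .vmdk are serialized on exactly those strings. A small helper reproducing the naming (layout copied from the log; the function itself is illustrative):

    def cached_image_path(datastore, cache_dir, image_id):
        # "[datastore1] devstack-image-cache_base/<id>/<id>.vmdk"
        return '[%s] %s/%s/%s.vmdk' % (datastore, cache_dir, image_id, image_id)

    print(cached_image_path('datastore1', 'devstack-image-cache_base',
                            '7da4e48b-416f-425b-b73b-3305c69c87ef'))
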
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 783.252424] env[68673]: DEBUG nova.network.neutron [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Updating instance_info_cache with network_info: [{"id": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "address": "fa:16:3e:21:77:67", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9dc1f2-45", "ovs_interfaceid": "9e9dc1f2-45cc-4e70-9b32-f729e8102431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.263125] env[68673]: DEBUG oslo_concurrency.lockutils [req-a6c4d30e-b5dd-4dbf-bcff-2ed4a6f8fe42 req-31904d4b-e37a-410f-a6ec-fcb47bae4adb service nova] Releasing lock "refresh_cache-dcb71746-662e-4ace-afcb-a997d236f12b" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.780721] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.783369] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.796503] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.796722] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.796899] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.797187] 
env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 817.798747] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0effff-1bd0-4d57-abe0-3d76711ddcfc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.806794] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b268c3ff-d7f6-4c5f-bfac-bbd0d57b980c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.820435] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f3c930-37d0-411c-8981-fd8196972c1f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.827030] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87d7c77-fb76-48cb-8d8a-2840c27ec998 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.856575] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180897MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 817.856725] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.856906] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.930574] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfd5ac40-9e14-473a-8f14-895534a4642e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.930731] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.930859] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 02517d31-0830-4e75-bde3-5f2e939f1328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.930980] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931117] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931235] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931352] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931467] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931579] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.931691] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.943116] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.953868] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7f2eb979-8932-4a35-a700-2fc40eb24310 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.964552] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 630479ff-b2ea-4189-91b5-1f9420715526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.974894] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 1ac1e6de-b44b-4732-aa67-35d01ec42309 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.985491] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 45d008a0-2f5b-4477-b882-fb5039101c88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.997081] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7dccdc28-d60a-4bf5-8a4b-1db09c68a48b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.010799] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 99af213d-076b-411c-955a-5a03ff83602f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.021401] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3535ecb8-b183-4d13-b894-60bdd1dca229 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.034084] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance eeb2f3f4-2a86-499e-b400-c53120eb2067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.044903] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 9e870122-8ffa-48e8-bf17-3ec1c06a50fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.055120] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 315775cb-2994-4099-ba4c-3cc6a15f8e0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.065358] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ff20518b-b7b8-447c-96eb-5a0f85c4db9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.075479] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 67354344-5870-4234-a9b2-33b330dfe55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.086681] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ad920a00-482e-4f01-b357-573cc0bf7eca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.098766] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 189516d5-97b5-4339-b0d9-94d256e36c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.110937] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance e9d6e27c-ba9a-45d5-ae1c-2558b44d9659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.122459] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7708d377-ec43-47d9-ba3d-7bb1020415cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.132380] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2148d351-6632-4979-9a3b-c79290d7e39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.145196] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 50cc30fa-01f0-441d-af41-76c5273123af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.156560] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ede1744b-c382-4b37-8884-11b73093f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.166858] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4c074098-9e15-495b-854a-109c8c5d9657 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.177916] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.177916] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 818.177916] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 818.529084] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773ab5f8-9534-4dc0-8955-e17cafc63dfb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.536877] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca60f66c-23e1-4c27-9104-54dd49161046 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.568997] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0766599-851e-4703-b809-ad92250e657f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.576378] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a62a18a-cf33-4a43-8dbc-f900e39200d5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.590050] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.598579] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 818.613517] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 818.613705] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.757s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.613972] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.614267] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.614443] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.614592] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 820.784062] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.784311] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 820.784441] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 820.804147] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.804326] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805029] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805029] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Skipping network cache update for instance because it is Building. 
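The "Final resource view" in the audit above follows directly from the tracked allocations: ten instances at 128 MB / 1 GB / 1 vCPU each, plus the 512 MB host memory reservation, give used_ram=1792MB, used_disk=10GB and used_vcpus=10. As a check:

    instances = 10
    per_instance = {'MEMORY_MB': 128, 'DISK_GB': 1, 'VCPU': 1}
    reserved_host_memory_mb = 512  # the 'reserved' MEMORY_MB in the inventory

    used_ram = reserved_host_memory_mb + instances * per_instance['MEMORY_MB']
    used_disk = instances * per_instance['DISK_GB']
    used_vcpus = instances * per_instance['VCPU']
    print(used_ram, used_disk, used_vcpus)  # 1792 10 10
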
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805029] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805029] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805029] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805220] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805220] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805274] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 820.805402] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 820.806066] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.784607] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.815886] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.815886] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 824.464190] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "bfd5ac40-9e14-473a-8f14-895534a4642e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.632251] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.508114] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "02517d31-0830-4e75-bde3-5f2e939f1328" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.493389] env[68673]: WARNING oslo_vmware.rw_handles [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 827.493389] env[68673]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 827.493389] env[68673]: ERROR oslo_vmware.rw_handles [ 827.493816] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 827.495517] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 827.495820] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Copying Virtual Disk [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/7abae23c-b8d7-43f0-865e-35f8409f5173/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 827.496179] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-224945ca-00aa-4826-b5f6-d236e77ae78f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.505000] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for the task: (returnval){ [ 827.505000] env[68673]: value = "task-3433453" [ 827.505000] env[68673]: _type = "Task" [ 827.505000] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.512812] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Task: {'id': task-3433453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.018328] env[68673]: DEBUG oslo_vmware.exceptions [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 828.018328] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 828.018328] env[68673]: ERROR nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 828.018328] env[68673]: Faults: ['InvalidArgument']
[ 828.018328] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Traceback (most recent call last):
[ 828.018328] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 828.018328] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     yield resources
[ 828.018328] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 828.018328] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     self.driver.spawn(context, instance, image_meta,
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     self._fetch_image_if_missing(context, vi)
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     image_cache(vi, tmp_image_ds_loc)
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     vm_util.copy_virtual_disk(
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     session._wait_for_task(vmdk_copy_task)
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     return self.wait_for_task(task_ref)
[ 828.018809] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     return evt.wait()
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     result = hub.switch()
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     return self.greenlet.switch()
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     self.f(*self.args, **self.kw)
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]     raise exceptions.translate_fault(task_info.error)
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Faults: ['InvalidArgument']
[ 828.019223] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e]
[ 828.019611] env[68673]: INFO nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Terminating instance
[ 828.019611] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 828.019611] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 828.019611] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca6d372d-52ab-40ca-9778-51eb7fa93b65 {{(pid=68673) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.021566] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 828.021666] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 828.022331] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c30b9a-6f5e-4011-ad4e-2b3e2d335087 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.029131] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 828.029358] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4207698-4fe6-49bc-b57e-eff2b2cc731f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.031554] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.031727] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 828.032681] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3065ea21-1d01-445f-8a59-eb5348ec8b21 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.037340] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for the task: (returnval){ [ 828.037340] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52844f5e-5547-df39-447c-1b30a0e41a7b" [ 828.037340] env[68673]: _type = "Task" [ 828.037340] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.044451] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52844f5e-5547-df39-447c-1b30a0e41a7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.101061] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 828.101299] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 828.101480] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Deleting the datastore file [datastore1] bfd5ac40-9e14-473a-8f14-895534a4642e {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.102077] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dae6248e-66c3-42ba-8e99-15331e08e914 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.108489] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for the task: (returnval){ [ 828.108489] env[68673]: value = "task-3433455" [ 828.108489] env[68673]: _type = "Task" [ 828.108489] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.117173] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Task: {'id': task-3433455, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.549683] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 828.549683] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Creating directory with path [datastore1] vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 828.549683] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1081af86-50a9-4173-ab92-784c7e194ac8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.560625] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Created directory with path [datastore1] vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.560813] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Fetch image to [datastore1] vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 828.560976] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 828.561691] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c3ae44-ea89-48d1-a939-11d9733c6c05 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.567963] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ef9ed-9a77-4d73-8085-f71a60489829 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.576756] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5bf67e-6865-492b-8fc5-835bf00c0d7e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.606348] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9b8477-4652-4113-b31c-e62cc7dfae98 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.613053] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a9ff8634-c178-4e41-998b-4d05b50ea447 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.617263] env[68673]: DEBUG oslo_vmware.api [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Task: {'id': task-3433455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082525} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.617765] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.617945] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 828.618152] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 828.618331] env[68673]: INFO nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Took 0.60 seconds to destroy the instance on the hypervisor. 
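The task lifecycle in the entries above ("Waiting for the task ... to complete", "progress is 0%", "completed successfully ... duration_secs") is oslo.vmware's generic poll-until-terminal loop around a vCenter task. A minimal sketch of that pattern, assuming a stand-in get_task_info callable in place of the real vSphere bindings (this is an illustration, not oslo.vmware's actual implementation):

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, interval=0.5):
        """Poll get_task_info() until the task reaches a terminal state.

        get_task_info() returns a dict such as {'state': 'running',
        'progress': 40}, {'state': 'success'}, or {'state': 'error',
        'error': '...'} -- a stand-in for vSphere TaskInfo.
        """
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # oslo.vmware translates the fault here, which is where the
                # VimFaultException ("A specified parameter was not correct:
                # fileType") seen earlier in this log is raised.
                raise TaskFailed(info.get('error', 'unknown fault'))
            print("progress is %s%%" % info.get('progress', 0))  # cf. _poll_task
            time.sleep(interval)

    # e.g. wait_for_task(lambda: {'state': 'success'}) returns immediately.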
[ 828.620368] env[68673]: DEBUG nova.compute.claims [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 828.620540] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.620748] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.648286] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 828.709687] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 828.775986] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 828.775986] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 829.083582] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d96b490-101a-409f-a857-c5105c1aa4a1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.091979] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d994e1-4b2e-4d2b-86ce-9c44304dd2e7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.122964] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a18b19-097f-4fa1-933c-903ac7f4b5a8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.130583] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c84ec-a347-4fef-a2bf-571828765aef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.143692] env[68673]: DEBUG nova.compute.provider_tree [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.151759] env[68673]: DEBUG nova.scheduler.client.report [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 829.165804] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.545s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.166388] env[68673]: ERROR nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.166388] env[68673]: Faults: ['InvalidArgument'] [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Traceback (most recent call last): [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 829.166388] env[68673]: ERROR 
nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] self.driver.spawn(context, instance, image_meta, [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] self._fetch_image_if_missing(context, vi) [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] image_cache(vi, tmp_image_ds_loc) [ 829.166388] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] vm_util.copy_virtual_disk( [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] session._wait_for_task(vmdk_copy_task) [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] return self.wait_for_task(task_ref) [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] return evt.wait() [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] result = hub.switch() [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] return self.greenlet.switch() [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 829.166795] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] self.f(*self.args, **self.kw) [ 829.167114] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 829.167114] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] raise exceptions.translate_fault(task_info.error) [ 829.167114] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.167114] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Faults: ['InvalidArgument'] [ 829.167114] env[68673]: ERROR nova.compute.manager [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] [ 829.167114] env[68673]: DEBUG nova.compute.utils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 829.168927] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Build of instance bfd5ac40-9e14-473a-8f14-895534a4642e was re-scheduled: A specified parameter was not correct: fileType [ 829.168927] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 829.169309] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 829.169484] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 829.169678] env[68673]: DEBUG nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 829.169793] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 829.523317] env[68673]: DEBUG nova.network.neutron [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.536563] env[68673]: INFO nova.compute.manager [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Took 0.36 seconds to deallocate network for instance. [ 829.649766] env[68673]: INFO nova.scheduler.client.report [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Deleted allocations for instance bfd5ac40-9e14-473a-8f14-895534a4642e [ 829.672025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-71fc0dfb-7721-43e6-84c6-92ed87914e7b tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.147s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.672025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.207s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.672025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Acquiring lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.672230] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.672230] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.674858] env[68673]: INFO nova.compute.manager [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Terminating instance [ 829.679633] env[68673]: DEBUG nova.compute.manager [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 829.679633] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 829.679633] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c7892b8-de1e-4100-bac8-d194f69b77be {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.684641] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 829.690365] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47aa8972-e278-489c-9fab-03dfbb93023d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.725576] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfd5ac40-9e14-473a-8f14-895534a4642e could not be found. [ 829.725792] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 829.725974] env[68673]: INFO nova.compute.manager [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Took 0.05 seconds to destroy the instance on the hypervisor. 
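The lockutils entries in this log carry their own timing data: 'acquired by "..." :: waited 5.207s' and '"released" by "..." :: held 204.147s' bracket how long the failed build held the per-instance lock before the waiting terminate could proceed. An illustrative parser for pulling those numbers out of a log like this one (not part of Nova; the regex is written against the exact wording of the lines above):

    import re

    LOCK_RE = re.compile(
        r'Lock "(?P<name>[^"]+)" (?:acquired by|"released" by) "(?P<owner>[^"]+)"'
        r' :: (?P<kind>waited|held) (?P<secs>[\d.]+)s')

    def lock_times(log_text):
        """Yield (lock_name, owner, 'waited'|'held', seconds) tuples."""
        for m in LOCK_RE.finditer(log_text):
            yield m['name'], m['owner'], m['kind'], float(m['secs'])

    # >>> list(lock_times('Lock "x" acquired by "f" :: waited 5.207s'))
    # [('x', 'f', 'waited', 5.207)]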
[ 829.726234] env[68673]: DEBUG oslo.service.loopingcall [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.728666] env[68673]: DEBUG nova.compute.manager [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 829.728666] env[68673]: DEBUG nova.network.neutron [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 829.742534] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.742534] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.743951] env[68673]: INFO nova.compute.claims [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.754055] env[68673]: DEBUG nova.network.neutron [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.763790] env[68673]: INFO nova.compute.manager [-] [instance: bfd5ac40-9e14-473a-8f14-895534a4642e] Took 0.04 seconds to deallocate network for instance. 
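The inventory dict reported for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e translates into schedulable capacity as (total - reserved) * allocation_ratio per resource class, which is why the 48 physical VCPUs with a 4.0 allocation ratio schedule as 192. A short worked version using the values printed in this log:

    # Inventory values copied from the set_inventory_for_provider entries above
    # (min_unit/max_unit/step_size omitted; they constrain per-request sizing,
    # not total capacity).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0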
[ 829.878344] env[68673]: DEBUG oslo_concurrency.lockutils [None req-aa9ec77e-3442-4bea-b2ec-c9d0ee00e80a tempest-ServerDiagnosticsTest-1594341499 tempest-ServerDiagnosticsTest-1594341499-project-member] Lock "bfd5ac40-9e14-473a-8f14-895534a4642e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.181166] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd7965c-81e0-422d-b733-8533bfd95fc2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.189832] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f3abd4-384a-4a8a-b599-50d490d8829d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.220079] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20e08be-a800-4bfd-8bee-5bd700a66ecf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.227860] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd18bb6c-04a6-42d9-8e60-16ffae38981f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.244821] env[68673]: DEBUG nova.compute.provider_tree [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.254645] env[68673]: DEBUG nova.scheduler.client.report [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 830.269125] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.526s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.269578] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 830.310140] env[68673]: DEBUG nova.compute.utils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.311437] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 830.311633] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.322516] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 830.387968] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 830.415971] env[68673]: DEBUG nova.policy [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5131c6b8b32d4bad97050e4415ccc4cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed2ff0bc7b3147cbb3c82451ff779c97', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 830.422318] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.422603] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.422769] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.422972] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.423154] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.423321] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 830.423562] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.423744] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.423929] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.424207] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.424486] env[68673]: DEBUG nova.virt.hardware [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.425802] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250db804-a58d-47a0-824b-b833525f7141 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.438018] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca53052-fa88-4e00-a241-dad4593b9eb9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.945862] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Successfully created port: d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.946167] env[68673]: DEBUG nova.compute.manager [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Received event network-vif-plugged-d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 831.946167] env[68673]: DEBUG oslo_concurrency.lockutils [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] Acquiring lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
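The entries around this point show Nova's external-event handshake: Neutron reports network-vif-plugged, Nova pops it against a per-instance waiter table, and because nothing has registered a waiter yet the event is logged as unexpected. A rough sketch of that bookkeeping, a simplification and not Nova's actual InstanceEvents class:

    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            # instance uuid -> {event name: waiter token}
            self._events = defaultdict(dict)

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering it remotely."""
            token = object()
            self._events[instance_uuid][event_name] = token
            return token

        def pop_instance_event(self, instance_uuid, event_name):
            """Return the registered waiter, or None if nobody was waiting."""
            waiter = self._events[instance_uuid].pop(event_name, None)
            if waiter is None:
                # Matches the log: the event raced ahead of the waiter, so it
                # is dispatched as "unexpected" rather than waking anyone.
                print("No waiting events found dispatching %s" % event_name)
            return waiter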
[ 831.946167] env[68673]: DEBUG oslo_concurrency.lockutils [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.946167] env[68673]: DEBUG oslo_concurrency.lockutils [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.946522] env[68673]: DEBUG nova.compute.manager [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] No waiting events found dispatching network-vif-plugged-d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.946522] env[68673]: WARNING nova.compute.manager [req-416c0f0d-bc8d-4335-832f-2a77d3a39de4 req-2d121344-bae5-4c75-8048-5cff0b808e52 service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Received unexpected event network-vif-plugged-d31ae53e-eef1-4997-9896-d80841cd8a27 for instance with vm_state building and task_state spawning. [ 832.124076] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Successfully updated port: d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.135418] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.135418] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquired lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.135418] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 832.214704] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.672409] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Updating instance_info_cache with network_info: [{"id": "d31ae53e-eef1-4997-9896-d80841cd8a27", "address": "fa:16:3e:15:9d:3f", "network": {"id": "2d1b98d3-9003-4f0a-9ae1-be447e432c6b", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1616347908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed2ff0bc7b3147cbb3c82451ff779c97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31ae53e-ee", "ovs_interfaceid": "d31ae53e-eef1-4997-9896-d80841cd8a27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.687581] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Releasing lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.688420] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance network_info: |[{"id": "d31ae53e-eef1-4997-9896-d80841cd8a27", "address": "fa:16:3e:15:9d:3f", "network": {"id": "2d1b98d3-9003-4f0a-9ae1-be447e432c6b", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1616347908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed2ff0bc7b3147cbb3c82451ff779c97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31ae53e-ee", "ovs_interfaceid": "d31ae53e-eef1-4997-9896-d80841cd8a27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 832.689210] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:9d:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd31ae53e-eef1-4997-9896-d80841cd8a27', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.697313] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Creating folder: Project (ed2ff0bc7b3147cbb3c82451ff779c97). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.698841] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cce29aa-d766-447b-888f-8a1cc36d5714 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.710025] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Created folder: Project (ed2ff0bc7b3147cbb3c82451ff779c97) in parent group-v685311. [ 832.710243] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Creating folder: Instances. Parent ref: group-v685349. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.712272] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-281bb7d1-649b-4af7-9de9-83d4bdc70277 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.721237] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Created folder: Instances in parent group-v685349. [ 832.721544] env[68673]: DEBUG oslo.service.loopingcall [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.721872] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 832.722041] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52075b90-49e3-431e-8f3a-da2e269c27c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.745307] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.745307] env[68673]: value = "task-3433458" [ 832.745307] env[68673]: _type = "Task" [ 832.745307] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.751741] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433458, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.255126] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433458, 'name': CreateVM_Task, 'duration_secs': 0.329517} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.255493] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 833.255832] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.255998] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.256336] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.257644] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f35631d1-5d3c-4f5f-964b-a4089a49cc85 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.261679] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for the task: (returnval){ [ 833.261679] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5238af1a-f3e7-783a-01ab-333fd2bb1efa" [ 833.261679] env[68673]: _type = "Task" [ 833.261679] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.271735] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5238af1a-f3e7-783a-01ab-333fd2bb1efa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.776668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.776668] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.776668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.192950] env[68673]: DEBUG nova.compute.manager [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Received event network-changed-d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 834.193195] env[68673]: DEBUG nova.compute.manager [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Refreshing instance network info cache due to event network-changed-d31ae53e-eef1-4997-9896-d80841cd8a27. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 834.193370] env[68673]: DEBUG oslo_concurrency.lockutils [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] Acquiring lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.193511] env[68673]: DEBUG oslo_concurrency.lockutils [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] Acquired lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.193671] env[68673]: DEBUG nova.network.neutron [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Refreshing network info cache for port d31ae53e-eef1-4997-9896-d80841cd8a27 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.800018] env[68673]: DEBUG nova.network.neutron [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Updated VIF entry in instance network info cache for port d31ae53e-eef1-4997-9896-d80841cd8a27. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 834.800018] env[68673]: DEBUG nova.network.neutron [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Updating instance_info_cache with network_info: [{"id": "d31ae53e-eef1-4997-9896-d80841cd8a27", "address": "fa:16:3e:15:9d:3f", "network": {"id": "2d1b98d3-9003-4f0a-9ae1-be447e432c6b", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1616347908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed2ff0bc7b3147cbb3c82451ff779c97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31ae53e-ee", "ovs_interfaceid": "d31ae53e-eef1-4997-9896-d80841cd8a27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.816157] env[68673]: DEBUG oslo_concurrency.lockutils [req-5f28eff5-450a-4977-a93b-a02b91087329 req-e225e67e-d320-4776-99ad-0224ae26803e service nova] Releasing lock "refresh_cache-adb818a9-e799-4f57-93f6-ee4e32104d61" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.572126] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock 
"c09c33d3-ae8a-4057-9f7d-6a4b4948423d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.060793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.929185] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "377657cd-9913-49ec-a0f8-a701655ff68d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.511883] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "31700289-ac8c-47a9-b4e0-981b5c9df645" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.068189] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "579c535d-7061-4822-8f7f-50b36ddfd44b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.490645] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "dcb71746-662e-4ace-afcb-a997d236f12b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.546480] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "adb818a9-e799-4f57-93f6-ee4e32104d61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.999547] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.999547] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd 
tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.029870] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "f4e540de-0b46-424b-894d-8ec0416d9828" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.030433] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.994136] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.994454] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.784591] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.785092] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.785092] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 877.801925] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 877.802068] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.802203] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 877.813487] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.718877] env[68673]: WARNING oslo_vmware.rw_handles [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 878.718877] env[68673]: ERROR oslo_vmware.rw_handles [ 878.719320] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 878.723024] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 878.723024] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Copying Virtual Disk [datastore1] vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] 
vmware_temp/acc53c69-f204-44ad-bfb1-645931fa40af/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 878.723024] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-887ef07c-8f00-4d54-bc8d-c5c50e5f0df1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.737723] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for the task: (returnval){ [ 878.737723] env[68673]: value = "task-3433466" [ 878.737723] env[68673]: _type = "Task" [ 878.737723] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.751273] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Task: {'id': task-3433466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.251663] env[68673]: DEBUG oslo_vmware.exceptions [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 879.251966] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.254916] env[68673]: ERROR nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.254916] env[68673]: Faults: ['InvalidArgument'] [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Traceback (most recent call last): [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] yield resources [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self.driver.spawn(context, instance, image_meta, [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, 
in spawn [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self._fetch_image_if_missing(context, vi) [ 879.254916] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] image_cache(vi, tmp_image_ds_loc) [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] vm_util.copy_virtual_disk( [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] session._wait_for_task(vmdk_copy_task) [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return self.wait_for_task(task_ref) [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return evt.wait() [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] result = hub.switch() [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 879.255252] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return self.greenlet.switch() [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self.f(*self.args, **self.kw) [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] raise exceptions.translate_fault(task_info.error) [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: 
de59505b-0bbf-41b4-8d06-65ab40e8a5a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Faults: ['InvalidArgument'] [ 879.255596] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] [ 879.255596] env[68673]: INFO nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Terminating instance [ 879.255596] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.255835] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.255835] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 879.255889] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 879.256180] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10487a00-f07e-4ea8-b993-597c38e5457e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.259136] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6465a974-086e-433b-be51-9d4b00eed325 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.270233] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 879.271741] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36ec290f-2917-4435-b292-7261c19be098 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.273573] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.273920] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 879.279659] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdd821e3-bce7-428a-8fa8-7aa03f70d21a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.285529] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for the task: (returnval){ [ 879.285529] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522701ff-1d00-597e-42c9-288352a5f9e8" [ 879.285529] env[68673]: _type = "Task" [ 879.285529] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.296243] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522701ff-1d00-597e-42c9-288352a5f9e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.365902] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 879.368256] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 879.368256] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Deleting the datastore file [datastore1] de59505b-0bbf-41b4-8d06-65ab40e8a5a8 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.368331] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5522bc24-e6ea-4aa4-88c3-95e20dbd012a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.375856] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for the task: (returnval){ [ 879.375856] env[68673]: value = "task-3433469" [ 879.375856] env[68673]: _type = "Task" [ 879.375856] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.385428] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Task: {'id': task-3433469, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.801423] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 879.801684] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Creating directory with path [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.801948] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-131c08df-2a08-479b-be54-669f3663854a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.818437] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Created directory with path [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.818667] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Fetch image to [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 879.818879] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 879.819672] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa0c4ac-28bd-4c0d-86d3-31ef8ff2d394 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.823636] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.823832] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.823966] env[68673]: DEBUG nova.compute.manager 
[None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 879.824489] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.829775] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb27b4fb-267c-4ac4-a9a2-da4de02422fc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.839799] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54a4925-3d89-4b33-9ff9-882b56f29fb3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.846237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.846237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.846237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.846237] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 879.847529] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d53e80-453d-4426-bbe9-5c4831f8faed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.881687] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb2f1d6-4a3f-4054-94aa-15d87885ad25 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.889491] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6987a07-7876-43f8-b581-c1cc194a92a7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.898099] env[68673]: DEBUG oslo_vmware.api [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Task: {'id': task-3433469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07772} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 879.899050] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 879.899262] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 879.899441] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 879.899632] env[68673]: INFO nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Took 0.64 seconds to destroy the instance on the hypervisor.
[ 879.910766] env[68673]: DEBUG nova.compute.claims [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 879.910920] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 879.911620] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 879.918849] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfc571c-f9fe-470b-ab3d-35a5c97852c8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.921156] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f2123b8a-3fca-4161-8087-ea1e716e6ab6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.927722] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9ef86f-25f9-437b-a652-eabbcd121ede {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.959438] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180910MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 879.959612] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 879.961784] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 880.030158] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 880.093107] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 880.093107] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 880.376219] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad643386-69ce-4730-aac5-ef804bce1688 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 880.385246] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdc5fe4-d46d-41b6-ba2b-2f634c28c10f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 880.427879] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c41aca-390d-4caa-bdc3-7fea79b27ccb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 880.435592] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd34160-80d4-4a73-a8d7-e28a2b5ab241 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 880.451657] env[68673]: DEBUG nova.compute.provider_tree [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 880.464087] env[68673]: DEBUG nova.scheduler.client.report [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 880.481704] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.570s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 880.482626] env[68673]: ERROR nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 880.482626] env[68673]: Faults: ['InvalidArgument']
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Traceback (most recent call last):
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self.driver.spawn(context, instance, image_meta,
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self._fetch_image_if_missing(context, vi)
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] image_cache(vi, tmp_image_ds_loc)
[ 880.482626] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] vm_util.copy_virtual_disk(
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] session._wait_for_task(vmdk_copy_task)
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return self.wait_for_task(task_ref)
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return evt.wait()
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] result = hub.switch()
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] return self.greenlet.switch()
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 880.483218] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] self.f(*self.args, **self.kw)
[ 880.483598] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 880.483598] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] raise exceptions.translate_fault(task_info.error)
[ 880.483598] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 880.483598] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Faults: ['InvalidArgument']
[ 880.483598] env[68673]: ERROR nova.compute.manager [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8]
[ 880.483598] env[68673]: DEBUG nova.compute.utils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 880.485926] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.525s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 880.488622] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Build of instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 was re-scheduled: A specified parameter was not correct: fileType
[ 880.488622] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 880.488622] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 880.488622] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
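[Editorial note: the traceback above follows the poll-and-translate shape: _poll_task reads the task state and, on an error result, raises a translated exception carrying the fault list. The snippet below is a minimal, stdlib-only sketch of that pattern, not oslo.vmware's actual implementation; the dict shape returned by poll_fn is an assumption made for illustration.]

import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(poll_fn, interval=0.5):
    """Poll poll_fn() until the task reaches a terminal state.

    poll_fn is assumed (hypothetically) to return a dict like
    {'state': 'running'|'success'|'error', 'faults': [...], 'message': str}.
    """
    while True:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors the effect of exceptions.translate_fault(task_info.error):
            # the fault name and message end up on one exception object.
            raise VimFaultException(info.get('faults', []), info['message'])
        time.sleep(interval)


# Reproducing the failure recorded above from canned task states:
states = iter([
    {'state': 'running'},
    {'state': 'error', 'faults': ['InvalidArgument'],
     'message': 'A specified parameter was not correct: fileType'},
])
try:
    wait_for_task(lambda: next(states), interval=0)
except VimFaultException as exc:
    print(exc, exc.fault_list)  # -> ...fileType ['InvalidArgument']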
[ 880.488622] env[68673]: DEBUG nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 880.489027] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 880.615181] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.615701] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 02517d31-0830-4e75-bde3-5f2e939f1328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.615701] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.615701] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.615850] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.615850] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.615953] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.616077] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.616192] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.616301] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 880.632047] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2148d351-6632-4979-9a3b-c79290d7e39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.647810] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 50cc30fa-01f0-441d-af41-76c5273123af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.662175] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ede1744b-c382-4b37-8884-11b73093f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.673501] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4c074098-9e15-495b-854a-109c8c5d9657 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.685581] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.696149] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.709534] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.720074] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 880.720323] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 880.720464] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 881.025237] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e06df4-4b16-46ab-ba20-7554c0c77b48 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.035413] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26156e7-8941-4a4f-ad1f-947ce398c3eb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.071051] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d73216-bb05-46a2-a614-2f4dcac7d544 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.079133] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb569e64-f40e-4245-ac28-ba646198c8ca {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.094542] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 881.109038] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 881.127226] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 881.127430] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.643s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.284027] env[68673]: DEBUG nova.network.neutron [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
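[Editorial note: the inventory and final-resource-view records above are self-consistent. Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, so 48 physical VCPUs with a 4.0 allocation ratio schedule as 192. The helper below is local to this note, not Nova code; only the numbers come from the log.]

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def usable_capacity(inv):
    # capacity = (total - reserved) * allocation_ratio, per resource class
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}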
[ 881.296436] env[68673]: INFO nova.compute.manager [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Took 0.81 seconds to deallocate network for instance.
[ 881.503551] env[68673]: INFO nova.scheduler.client.report [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Deleted allocations for instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8
[ 881.526988] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef10ce70-7c01-453e-8579-633affda7d5d tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 254.611s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.528367] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 56.896s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 881.528596] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Acquiring lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 881.528860] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 881.529207] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.533584] env[68673]: INFO nova.compute.manager [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Terminating instance
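[Editorial note: the lockutils records above trace the lifecycle Acquiring -> acquired :: waited Ns -> "released" :: held Ns around a named lock; the terminate request waited 56.896s because the failed build held the per-instance lock for 254.611s. Below is a stdlib-only sketch of that lifecycle with the same log shape; names are illustrative and this is not oslo.concurrency's implementation.]

import threading
import time

_locks = {}


def synchronized(name):
    lock = _locks.setdefault(name, threading.Lock())

    def decorator(fn):
        def inner(*args, **kwargs):
            print(f'Acquiring lock "{name}" by "{fn.__name__}"')
            start = time.monotonic()
            with lock:
                print(f'Lock "{name}" acquired :: waited '
                      f'{time.monotonic() - start:.3f}s')
                held_from = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    print(f'Lock "{name}" "released" :: held '
                          f'{time.monotonic() - held_from:.3f}s')
        return inner
    return decorator


@synchronized('compute_resources')
def update_available_resource():
    time.sleep(0.01)  # stand-in for the resource tracker's critical section


update_available_resource()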
[ 881.535540] env[68673]: DEBUG nova.compute.manager [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 881.539023] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 881.539023] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02fd58c3-dbcd-42c5-a1c5-ae265eab2625 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.551162] env[68673]: DEBUG nova.compute.manager [None req-9f496b55-4fee-459c-bddd-18da5214b14e tempest-ServersWithSpecificFlavorTestJSON-151809798 tempest-ServersWithSpecificFlavorTestJSON-151809798-project-member] [instance: 7f2eb979-8932-4a35-a700-2fc40eb24310] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 881.558204] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73c4b02-e907-49f9-ac26-7369e3a1802c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 881.594466] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de59505b-0bbf-41b4-8d06-65ab40e8a5a8 could not be found.
[ 881.594466] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 881.594466] env[68673]: INFO nova.compute.manager [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Took 0.06 seconds to destroy the instance on the hypervisor.
[ 881.594466] env[68673]: DEBUG oslo.service.loopingcall [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 881.595382] env[68673]: DEBUG nova.compute.manager [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 881.595382] env[68673]: DEBUG nova.network.neutron [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 881.600335] env[68673]: DEBUG nova.compute.manager [None req-9f496b55-4fee-459c-bddd-18da5214b14e tempest-ServersWithSpecificFlavorTestJSON-151809798 tempest-ServersWithSpecificFlavorTestJSON-151809798-project-member] [instance: 7f2eb979-8932-4a35-a700-2fc40eb24310] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 881.644882] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9f496b55-4fee-459c-bddd-18da5214b14e tempest-ServersWithSpecificFlavorTestJSON-151809798 tempest-ServersWithSpecificFlavorTestJSON-151809798-project-member] Lock "7f2eb979-8932-4a35-a700-2fc40eb24310" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.619s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.656037] env[68673]: DEBUG nova.compute.manager [None req-83345f96-9f0e-43f9-8c7b-d2078befeae5 tempest-ImagesOneServerNegativeTestJSON-2141694111 tempest-ImagesOneServerNegativeTestJSON-2141694111-project-member] [instance: 630479ff-b2ea-4189-91b5-1f9420715526] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 881.665795] env[68673]: DEBUG nova.network.neutron [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 881.682035] env[68673]: INFO nova.compute.manager [-] [instance: de59505b-0bbf-41b4-8d06-65ab40e8a5a8] Took 0.09 seconds to deallocate network for instance.
[ 881.698252] env[68673]: DEBUG nova.compute.manager [None req-83345f96-9f0e-43f9-8c7b-d2078befeae5 tempest-ImagesOneServerNegativeTestJSON-2141694111 tempest-ImagesOneServerNegativeTestJSON-2141694111-project-member] [instance: 630479ff-b2ea-4189-91b5-1f9420715526] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 881.727410] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83345f96-9f0e-43f9-8c7b-d2078befeae5 tempest-ImagesOneServerNegativeTestJSON-2141694111 tempest-ImagesOneServerNegativeTestJSON-2141694111-project-member] Lock "630479ff-b2ea-4189-91b5-1f9420715526" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.922s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.748854] env[68673]: DEBUG nova.compute.manager [None req-7a8eded3-3c71-43ce-9443-836e020c9e69 tempest-ServerExternalEventsTest-1747200570 tempest-ServerExternalEventsTest-1747200570-project-member] [instance: 1ac1e6de-b44b-4732-aa67-35d01ec42309] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 881.785219] env[68673]: DEBUG nova.compute.manager [None req-7a8eded3-3c71-43ce-9443-836e020c9e69 tempest-ServerExternalEventsTest-1747200570 tempest-ServerExternalEventsTest-1747200570-project-member] [instance: 1ac1e6de-b44b-4732-aa67-35d01ec42309] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 881.812203] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7a8eded3-3c71-43ce-9443-836e020c9e69 tempest-ServerExternalEventsTest-1747200570 tempest-ServerExternalEventsTest-1747200570-project-member] Lock "1ac1e6de-b44b-4732-aa67-35d01ec42309" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.666s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.829077] env[68673]: DEBUG nova.compute.manager [None req-1f9021c0-81a0-45e0-a068-bf9d4fee2d58 tempest-ServersNegativeTestJSON-723567342 tempest-ServersNegativeTestJSON-723567342-project-member] [instance: 45d008a0-2f5b-4477-b882-fb5039101c88] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 881.838915] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2658e48e-d712-4fe4-9713-627f7ebcf6c8 tempest-ServerMetadataNegativeTestJSON-1822384500 tempest-ServerMetadataNegativeTestJSON-1822384500-project-member] Lock "de59505b-0bbf-41b4-8d06-65ab40e8a5a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.310s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 881.864672] env[68673]: DEBUG nova.compute.manager [None req-1f9021c0-81a0-45e0-a068-bf9d4fee2d58 tempest-ServersNegativeTestJSON-723567342 tempest-ServersNegativeTestJSON-723567342-project-member] [instance: 45d008a0-2f5b-4477-b882-fb5039101c88] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.026762] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1f9021c0-81a0-45e0-a068-bf9d4fee2d58 tempest-ServersNegativeTestJSON-723567342 tempest-ServersNegativeTestJSON-723567342-project-member] Lock "45d008a0-2f5b-4477-b882-fb5039101c88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.742s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.039567] env[68673]: DEBUG nova.compute.manager [None req-64b76611-7407-4df0-85ac-7eb14c1bbe7d tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] [instance: 7dccdc28-d60a-4bf5-8a4b-1db09c68a48b] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.082220] env[68673]: DEBUG nova.compute.manager [None req-64b76611-7407-4df0-85ac-7eb14c1bbe7d tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] [instance: 7dccdc28-d60a-4bf5-8a4b-1db09c68a48b] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
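[Editorial note: the repeated "Starting instance... / Instance disappeared before build." pairs above are the build path re-checking the instance under its per-instance lock and bailing out when a concurrent delete already won the race; the multi-minute "held" times show how long these queued builds waited. The sketch below illustrates that early-exit pattern only; all names are hypothetical, not Nova's actual signatures.]

def do_build_and_run_instance(lookup_instance, instance_uuid, build_fn):
    instance = lookup_instance(instance_uuid)  # re-fetch inside the lock
    if instance is None:
        print(f'[instance: {instance_uuid}] Instance disappeared before build.')
        return
    build_fn(instance)


# The tempest runs above deleted their servers long before the build lock
# was finally acquired, so the lookup finds nothing:
do_build_and_run_instance(lambda uuid: None,
                          '7f2eb979-8932-4a35-a700-2fc40eb24310', print)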
[ 882.118039] env[68673]: DEBUG oslo_concurrency.lockutils [None req-64b76611-7407-4df0-85ac-7eb14c1bbe7d tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Lock "7dccdc28-d60a-4bf5-8a4b-1db09c68a48b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.595s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.130671] env[68673]: DEBUG nova.compute.manager [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 99af213d-076b-411c-955a-5a03ff83602f] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.162393] env[68673]: DEBUG nova.compute.manager [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 99af213d-076b-411c-955a-5a03ff83602f] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.198440] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "99af213d-076b-411c-955a-5a03ff83602f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.566s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.213962] env[68673]: DEBUG nova.compute.manager [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 3535ecb8-b183-4d13-b894-60bdd1dca229] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.241853] env[68673]: DEBUG nova.compute.manager [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 3535ecb8-b183-4d13-b894-60bdd1dca229] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.267633] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e4a58f5c-08eb-4cb0-88cf-e0bfca705e2a tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "3535ecb8-b183-4d13-b894-60bdd1dca229" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.607s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.282633] env[68673]: DEBUG nova.compute.manager [None req-ceed0630-fce6-40c1-81f3-c59b3463b300 tempest-ServerActionsTestOtherB-436139619 tempest-ServerActionsTestOtherB-436139619-project-member] [instance: eeb2f3f4-2a86-499e-b400-c53120eb2067] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.308300] env[68673]: DEBUG nova.compute.manager [None req-ceed0630-fce6-40c1-81f3-c59b3463b300 tempest-ServerActionsTestOtherB-436139619 tempest-ServerActionsTestOtherB-436139619-project-member] [instance: eeb2f3f4-2a86-499e-b400-c53120eb2067] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.334044] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ceed0630-fce6-40c1-81f3-c59b3463b300 tempest-ServerActionsTestOtherB-436139619 tempest-ServerActionsTestOtherB-436139619-project-member] Lock "eeb2f3f4-2a86-499e-b400-c53120eb2067" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.242s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.346566] env[68673]: DEBUG nova.compute.manager [None req-ab406369-deb8-48ff-a34e-e150f318fe83 tempest-ServersTestFqdnHostnames-1604400882 tempest-ServersTestFqdnHostnames-1604400882-project-member] [instance: 9e870122-8ffa-48e8-bf17-3ec1c06a50fd] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.381774] env[68673]: DEBUG nova.compute.manager [None req-ab406369-deb8-48ff-a34e-e150f318fe83 tempest-ServersTestFqdnHostnames-1604400882 tempest-ServersTestFqdnHostnames-1604400882-project-member] [instance: 9e870122-8ffa-48e8-bf17-3ec1c06a50fd] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.416124] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ab406369-deb8-48ff-a34e-e150f318fe83 tempest-ServersTestFqdnHostnames-1604400882 tempest-ServersTestFqdnHostnames-1604400882-project-member] Lock "9e870122-8ffa-48e8-bf17-3ec1c06a50fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.898s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.438757] env[68673]: DEBUG nova.compute.manager [None req-2d899b25-40f2-4cf7-9cf6-92ae6f3719a6 tempest-ServerAddressesNegativeTestJSON-104457049 tempest-ServerAddressesNegativeTestJSON-104457049-project-member] [instance: 315775cb-2994-4099-ba4c-3cc6a15f8e0f] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.498880] env[68673]: DEBUG nova.compute.manager [None req-2d899b25-40f2-4cf7-9cf6-92ae6f3719a6 tempest-ServerAddressesNegativeTestJSON-104457049 tempest-ServerAddressesNegativeTestJSON-104457049-project-member] [instance: 315775cb-2994-4099-ba4c-3cc6a15f8e0f] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.538829] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2d899b25-40f2-4cf7-9cf6-92ae6f3719a6 tempest-ServerAddressesNegativeTestJSON-104457049 tempest-ServerAddressesNegativeTestJSON-104457049-project-member] Lock "315775cb-2994-4099-ba4c-3cc6a15f8e0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.286s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.561860] env[68673]: DEBUG nova.compute.manager [None req-fa96f40e-1b55-4b60-a49c-28bc2d6c5647 tempest-FloatingIPsAssociationTestJSON-397346482 tempest-FloatingIPsAssociationTestJSON-397346482-project-member] [instance: ff20518b-b7b8-447c-96eb-5a0f85c4db9e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.611233] env[68673]: DEBUG nova.compute.manager [None req-fa96f40e-1b55-4b60-a49c-28bc2d6c5647 tempest-FloatingIPsAssociationTestJSON-397346482 tempest-FloatingIPsAssociationTestJSON-397346482-project-member] [instance: ff20518b-b7b8-447c-96eb-5a0f85c4db9e] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.657318] env[68673]: DEBUG oslo_concurrency.lockutils [None req-fa96f40e-1b55-4b60-a49c-28bc2d6c5647 tempest-FloatingIPsAssociationTestJSON-397346482 tempest-FloatingIPsAssociationTestJSON-397346482-project-member] Lock "ff20518b-b7b8-447c-96eb-5a0f85c4db9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.537s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.682582] env[68673]: DEBUG nova.compute.manager [None req-ed0833da-9945-46ee-85cd-04685b2b6bc3 tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] [instance: 67354344-5870-4234-a9b2-33b330dfe55f] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.716060] env[68673]: DEBUG nova.compute.manager [None req-ed0833da-9945-46ee-85cd-04685b2b6bc3 tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] [instance: 67354344-5870-4234-a9b2-33b330dfe55f] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.756576] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed0833da-9945-46ee-85cd-04685b2b6bc3 tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Lock "67354344-5870-4234-a9b2-33b330dfe55f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.521s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.777955] env[68673]: DEBUG nova.compute.manager [None req-63b908b9-74bf-4aa4-88bf-4a3e4f28c28e tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] [instance: ad920a00-482e-4f01-b357-573cc0bf7eca] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.819907] env[68673]: DEBUG nova.compute.manager [None req-63b908b9-74bf-4aa4-88bf-4a3e4f28c28e tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] [instance: ad920a00-482e-4f01-b357-573cc0bf7eca] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.855840] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b908b9-74bf-4aa4-88bf-4a3e4f28c28e tempest-ServersAdminTestJSON-1500515411 tempest-ServersAdminTestJSON-1500515411-project-member] Lock "ad920a00-482e-4f01-b357-573cc0bf7eca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.141s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.871265] env[68673]: DEBUG nova.compute.manager [None req-dabb2182-9014-4ddd-a854-5ffedc276fae tempest-ServerShowV257Test-1275903674 tempest-ServerShowV257Test-1275903674-project-member] [instance: 189516d5-97b5-4339-b0d9-94d256e36c55] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.904707] env[68673]: DEBUG nova.compute.manager [None req-dabb2182-9014-4ddd-a854-5ffedc276fae tempest-ServerShowV257Test-1275903674 tempest-ServerShowV257Test-1275903674-project-member] [instance: 189516d5-97b5-4339-b0d9-94d256e36c55] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 882.938855] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dabb2182-9014-4ddd-a854-5ffedc276fae tempest-ServerShowV257Test-1275903674 tempest-ServerShowV257Test-1275903674-project-member] Lock "189516d5-97b5-4339-b0d9-94d256e36c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.867s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 882.956063] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 882.956383] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 882.959666] env[68673]: DEBUG nova.compute.manager [None req-5627b011-47ae-47ed-9355-27c7ccea17ca tempest-InstanceActionsTestJSON-1167780584 tempest-InstanceActionsTestJSON-1167780584-project-member] [instance: e9d6e27c-ba9a-45d5-ae1c-2558b44d9659] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 882.994443] env[68673]: DEBUG nova.compute.manager [None req-5627b011-47ae-47ed-9355-27c7ccea17ca tempest-InstanceActionsTestJSON-1167780584 tempest-InstanceActionsTestJSON-1167780584-project-member] [instance: e9d6e27c-ba9a-45d5-ae1c-2558b44d9659] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 883.019601] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5627b011-47ae-47ed-9355-27c7ccea17ca tempest-InstanceActionsTestJSON-1167780584 tempest-InstanceActionsTestJSON-1167780584-project-member] Lock "e9d6e27c-ba9a-45d5-ae1c-2558b44d9659" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.415s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 883.035462] env[68673]: DEBUG nova.compute.manager [None req-df7ceddc-a5ff-4352-990f-0dbea8a217d5 tempest-ServerDiagnosticsV248Test-1763732598 tempest-ServerDiagnosticsV248Test-1763732598-project-member] [instance: 7708d377-ec43-47d9-ba3d-7bb1020415cb] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 883.079400] env[68673]: DEBUG nova.compute.manager [None req-df7ceddc-a5ff-4352-990f-0dbea8a217d5 tempest-ServerDiagnosticsV248Test-1763732598 tempest-ServerDiagnosticsV248Test-1763732598-project-member] [instance: 7708d377-ec43-47d9-ba3d-7bb1020415cb] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 883.089414] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 883.089414] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 883.089414] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 883.118587] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.121363] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.121510] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.121640] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.121767] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.121896] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.122224] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.122224] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.122323] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 883.122451] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
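[Editorial note: the heal pass above rebuilds its work list, skips every instance still in the Building state, and ends with nothing to do. A small sketch of that filtering follows; the structure and dict shape are illustrative, not Nova's implementation.]

def instances_to_heal(instances):
    to_heal = []
    for inst in instances:
        if inst['vm_state'] == 'building':
            print(f"[instance: {inst['uuid']}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal


instances_to_heal([{'uuid': '02517d31-0830-4e75-bde3-5f2e939f1328',
                    'vm_state': 'building'}])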
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 883.123684] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df7ceddc-a5ff-4352-990f-0dbea8a217d5 tempest-ServerDiagnosticsV248Test-1763732598 tempest-ServerDiagnosticsV248Test-1763732598-project-member] Lock "7708d377-ec43-47d9-ba3d-7bb1020415cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.481s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.124221] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.125973] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.126507] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.145539] env[68673]: DEBUG nova.compute.manager [None req-6877aa12-0f90-4e05-9e6f-6027e005aec3 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: 2148d351-6632-4979-9a3b-c79290d7e39e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 883.197245] env[68673]: DEBUG nova.compute.manager [None req-6877aa12-0f90-4e05-9e6f-6027e005aec3 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] [instance: 2148d351-6632-4979-9a3b-c79290d7e39e] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 883.233683] env[68673]: DEBUG oslo_concurrency.lockutils [None req-6877aa12-0f90-4e05-9e6f-6027e005aec3 tempest-DeleteServersAdminTestJSON-700717692 tempest-DeleteServersAdminTestJSON-700717692-project-member] Lock "2148d351-6632-4979-9a3b-c79290d7e39e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.096s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.251301] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 883.336981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.336981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.336981] env[68673]: INFO nova.compute.claims [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.405441] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 883.423448] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 883.424051] env[68673]: DEBUG nova.compute.provider_tree [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.438736] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None 
{{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 883.467788] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 883.772970] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0611e9f-7b92-4620-b460-b9f77157b33d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.781282] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb3a4bc-3ff0-47b4-8b4f-968ecd572145 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.784685] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.819478] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5909335f-2390-478a-ac45-8c25957e452a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.827220] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036ca4a9-4ec8-42cb-8f7e-b83b4ebde627 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.840807] env[68673]: DEBUG nova.compute.provider_tree [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.850418] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 883.866068] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.531s {{(pid=68673) inner 
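/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}

The Acquiring/acquired/released triplet that brackets the resource claim above is oslo.concurrency's lockutils wrapper serializing ResourceTracker.instance_claim on the process-local "compute_resources" lock and reporting how long the caller waited for the lock and how long it held it (0.531s here). A minimal stdlib sketch of that pattern, with illustrative names (not oslo.concurrency's actual code):

```python
# Minimal sketch of the named-lock timing pattern in the lines above,
# in the spirit of oslo.concurrency's lockutils "inner" wrapper
# (an illustration, not the real implementation).
import threading
import time
from collections import defaultdict

_locks = defaultdict(threading.Lock)

def synchronized(name):
    def wrap(fn):
        def inner(*args, **kwargs):
            target = f"{fn.__module__}.{fn.__qualname__}"
            lock = _locks[name]
            print(f'Acquiring lock "{name}" by "{target}"')
            t0 = time.monotonic()
            lock.acquire()
            print(f'Lock "{name}" acquired by "{target}" :: '
                  f'waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                print(f'Lock "{name}" "released" by "{target}" :: '
                      f'held {time.monotonic() - t1:.3f}s')
        return inner
    return wrap

@synchronized("compute_resources")
def instance_claim():
    time.sleep(0.5)  # stand-in for the resource tracker updating its claim
```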
[ 883.866574] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 883.908958] env[68673]: DEBUG nova.compute.utils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.910665] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 883.910891] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 883.926964] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 884.010165] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 884.041748] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.042063] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.042164] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.042347] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.042495] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.042941] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.043253] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.043574] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies 
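/opt/stack/nova/nova/virt/hardware.py:471}} [ 884.043574] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.043806] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.043867] env[68673]: DEBUG nova.virt.hardware [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}

The hardware lines above enumerate every (sockets, cores, threads) split of the flavor's vCPU count that fits the 65536-per-dimension limits, then sort by preference; for the 1-vCPU m1.nano flavor only 1:1:1 survives. A rough stdlib sketch of that enumeration, in the spirit of nova.virt.hardware (illustrative, not the actual implementation):

```python
# Enumerate (sockets, cores, threads) factorizations of a vCPU count
# within per-dimension maxima, mirroring the "Build topologies ... Got N
# possible topologies" steps logged above.
from dataclasses import dataclass

@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# For the 1-vCPU flavor above this yields exactly one option:
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(1))
```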
[ 884.045057] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a561a579-3c58-487d-8427-41fb2bd2bee1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.054093] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d32bb7-8645-448a-bef0-f1fa7a2320cf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.153027] env[68673]: DEBUG nova.policy [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '485a8fb297a048b6b431357657074a74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd63c38b685994e44b17d74be2c8bbe04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 884.540561] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a5f305b-baaa-4f1e-9220-fce7022f08c1 tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Acquiring lock "52738695-8a04-457a-a58e-46c214422409" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.540561] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a5f305b-baaa-4f1e-9220-fce7022f08c1 tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Lock "52738695-8a04-457a-a58e-46c214422409" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.674562] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 
tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "50cc30fa-01f0-441d-af41-76c5273123af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.644754] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Successfully created port: af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.672991] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1d60da41-c424-42a2-9520-fe114ce2a154 tempest-ServersV294TestFqdnHostnames-1595699051 tempest-ServersV294TestFqdnHostnames-1595699051-project-member] Acquiring lock "5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.673315] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1d60da41-c424-42a2-9520-fe114ce2a154 tempest-ServersV294TestFqdnHostnames-1595699051 tempest-ServersV294TestFqdnHostnames-1595699051-project-member] Lock "5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.228557] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Successfully updated port: af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.244338] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.244546] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquired lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.244761] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.541383] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 888.620768] env[68673]: DEBUG nova.compute.manager [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Received event network-vif-plugged-af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 888.620768] env[68673]: DEBUG oslo_concurrency.lockutils [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] Acquiring lock "50cc30fa-01f0-441d-af41-76c5273123af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.620768] env[68673]: DEBUG oslo_concurrency.lockutils [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] Lock "50cc30fa-01f0-441d-af41-76c5273123af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.620768] env[68673]: DEBUG oslo_concurrency.lockutils [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] Lock "50cc30fa-01f0-441d-af41-76c5273123af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.621053] env[68673]: DEBUG nova.compute.manager [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] No waiting events found dispatching network-vif-plugged-af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 888.621219] env[68673]: WARNING nova.compute.manager [req-744c09ac-2446-4660-8091-a62eddd16f31 req-9bfd1e1c-a10a-4340-b456-4d2a3761adbc service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Received unexpected event network-vif-plugged-af467d78-9416-4fdb-ad77-57ed531d324f for instance with vm_state building and task_state deleting. 
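The event lines above show Nova's external-event handshake: when Neutron reports network-vif-plugged, the compute manager takes the instance's "-events" lock and looks for a registered waiter to pop; here the instance has already moved on to task_state deleting, so no waiter exists and the event is logged as unexpected. A simplified stdlib model of that dispatch pattern (illustrative names, not Nova's actual classes):

```python
# Simplified model of the "pop waiting event or warn" dispatch seen above.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_tag) -> Event

    def prepare_for_event(self, instance_uuid, event_tag):
        """Called by the build path before it blocks on an external event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_tag)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_tag):
        """Called when e.g. 'network-vif-plugged-<port>' arrives from Neutron."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_tag), None)

events = InstanceEvents()

def external_instance_event(instance_uuid, event_tag, vm_state, task_state):
    waiter = events.pop_instance_event(instance_uuid, event_tag)
    if waiter is not None:
        waiter.set()   # wake the thread spawning the instance
    else:
        # Matches the WARNING above: the instance moved on (here: deleting)
        # before the event arrived, so nobody is waiting for it.
        print(f"Received unexpected event {event_tag} for instance "
              f"with vm_state {vm_state} and task_state {task_state}")
```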
[ 888.924665] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updating instance_info_cache with network_info: [{"id": "af467d78-9416-4fdb-ad77-57ed531d324f", "address": "fa:16:3e:0a:b9:48", "network": {"id": "3b64f0e6-4614-4893-a56b-f57efcbc70ff", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1578823100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d63c38b685994e44b17d74be2c8bbe04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf467d78-94", "ovs_interfaceid": "af467d78-9416-4fdb-ad77-57ed531d324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.943281] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Releasing lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.943585] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance network_info: |[{"id": "af467d78-9416-4fdb-ad77-57ed531d324f", "address": "fa:16:3e:0a:b9:48", "network": {"id": "3b64f0e6-4614-4893-a56b-f57efcbc70ff", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1578823100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d63c38b685994e44b17d74be2c8bbe04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf467d78-94", "ovs_interfaceid": "af467d78-9416-4fdb-ad77-57ed531d324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
888.943991] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:b9:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af467d78-9416-4fdb-ad77-57ed531d324f', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.953304] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Creating folder: Project (d63c38b685994e44b17d74be2c8bbe04). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 888.954012] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad6841dd-742c-4357-a1c4-82e3af49bdab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.970939] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Created folder: Project (d63c38b685994e44b17d74be2c8bbe04) in parent group-v685311. [ 888.970939] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Creating folder: Instances. Parent ref: group-v685356. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 888.970939] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44253f8f-42f2-48f5-8e70-9e545193e3c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.980657] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Created folder: Instances in parent group-v685356. [ 888.980657] env[68673]: DEBUG oslo.service.loopingcall [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.980657] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 888.980869] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47c04212-bd75-4b01-bb6a-e3b3d8dab481 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.005031] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.005031] env[68673]: value = "task-3433475" [ 889.005031] env[68673]: _type = "Task" [ 889.005031] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.030085] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433475, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.515381] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433475, 'name': CreateVM_Task, 'duration_secs': 0.327459} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.515908] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 889.517181] env[68673]: DEBUG oslo_vmware.service [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e4a977-56f5-4c5a-8161-4df4c82499d1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.524551] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.524680] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.525606] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.525932] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ed2790-fc4b-47f7-b616-647eb08ce2ae {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
889.531078] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for the task: (returnval){ [ 889.531078] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52518406-5102-7642-e57e-4375d9415f66" [ 889.531078] env[68673]: _type = "Task" [ 889.531078] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.539356] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52518406-5102-7642-e57e-4375d9415f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.629857] env[68673]: DEBUG oslo_concurrency.lockutils [None req-439aa039-c81a-4d3c-85d0-559c6e9472e7 tempest-ServersTestBootFromVolume-927148198 tempest-ServersTestBootFromVolume-927148198-project-member] Acquiring lock "78561ca7-b99f-414f-a8f9-1abf127ae3be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.630183] env[68673]: DEBUG oslo_concurrency.lockutils [None req-439aa039-c81a-4d3c-85d0-559c6e9472e7 tempest-ServersTestBootFromVolume-927148198 tempest-ServersTestBootFromVolume-927148198-project-member] Lock "78561ca7-b99f-414f-a8f9-1abf127ae3be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.051025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.051025] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.051025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.051025] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.051314] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.051314] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54faf1e8-365f-4d56-a41a-bed7c600ce79 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.062707] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.062707] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 890.063131] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d0163d-c54e-472e-b803-9d7957a256cc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.071850] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a19f3f7-f16a-48e0-bdef-460f29a30a3c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.078305] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for the task: (returnval){ [ 890.078305] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529f84d6-2f9e-ec9d-dbd4-780ba1ba7f9e" [ 890.078305] env[68673]: _type = "Task" [ 890.078305] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.088513] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529f84d6-2f9e-ec9d-dbd4-780ba1ba7f9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.384049] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1fdd4b34-903e-4539-962a-4b2c0e4f5810 tempest-ServerPasswordTestJSON-1615801894 tempest-ServerPasswordTestJSON-1615801894-project-member] Acquiring lock "187f308b-b2b4-4b60-b490-71b8a74f916e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.384307] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1fdd4b34-903e-4539-962a-4b2c0e4f5810 tempest-ServerPasswordTestJSON-1615801894 tempest-ServerPasswordTestJSON-1615801894-project-member] Lock "187f308b-b2b4-4b60-b490-71b8a74f916e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.590033] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 890.590472] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Creating directory with path [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.591094] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cfda2bf-2e3e-42b6-9a22-9f28d04980ab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.626686] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Created directory with path [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.626802] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Fetch image to [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 890.626931] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore2 {{(pid=68673) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 890.627951] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abed35c-a326-48df-8ee4-202be3662474 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.641178] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73406eb8-5650-4571-adab-e23b101d36eb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.652168] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c377d1-f82d-42e3-9cf4-76b2c425f5a7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.686075] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d1733f-3176-4614-bcca-8467041b010a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.694538] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3f9d939c-2802-46e9-a684-1bc359c136f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.713896] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore2 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 890.777788] env[68673]: DEBUG oslo_vmware.rw_handles [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 890.838321] env[68673]: DEBUG oslo_vmware.rw_handles [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 890.838541] env[68673]: DEBUG oslo_vmware.rw_handles [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
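{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}

The rw_handles lines above stream the 21318656-byte sparse VMDK to the ESX host's /folder datastore URL over a single HTTPS write connection, and closing the handle reads the server's reply; the RemoteDisconnected WARNING further down in this log comes from exactly that close path. A minimal sketch of the pattern with http.client (host, path, and cookie are placeholders, not oslo.vmware's API):

```python
# Stream a local file to an ESX /folder URL with one PUT, then read the
# response on close -- a minimal sketch of the write-handle flow above.
import http.client
import ssl

def upload_to_datastore(host, ds_path, src_file, size, cookie):
    conn = http.client.HTTPSConnection(host, 443,
                                       context=ssl.create_default_context())
    conn.putrequest("PUT", ds_path)
    conn.putheader("Content-Length", str(size))
    conn.putheader("Cookie", cookie)   # vCenter session ticket/cookie
    conn.endheaders()
    while True:
        chunk = src_file.read(64 * 1024)
        if not chunk:
            break   # "Completed reading data from the image iterator"
        conn.send(chunk)
    # Closing the handle means reading the server's reply; if the ESX host
    # drops the connection instead, this is where http.client raises
    # RemoteDisconnected (see the WARNING later in this log).
    resp = conn.getresponse()
    resp.read()
    conn.close()
    return resp.status
```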
[ 891.840281] env[68673]: DEBUG nova.compute.manager [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Received event network-changed-af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 891.840281] env[68673]: DEBUG nova.compute.manager [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Refreshing instance network info cache due to event network-changed-af467d78-9416-4fdb-ad77-57ed531d324f. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 891.840629] env[68673]: DEBUG oslo_concurrency.lockutils [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] Acquiring lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.840629] env[68673]: DEBUG oslo_concurrency.lockutils [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] Acquired lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.840705] env[68673]: DEBUG nova.network.neutron [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Refreshing network info cache for port af467d78-9416-4fdb-ad77-57ed531d324f {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 891.937444] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd6356da-0f3e-4cc5-9794-3385db516be9 tempest-ServerActionsTestOtherA-60787225 tempest-ServerActionsTestOtherA-60787225-project-member] Acquiring lock "a31635f4-c7c8-4498-b825-b3a159400096" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.937982] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd6356da-0f3e-4cc5-9794-3385db516be9 tempest-ServerActionsTestOtherA-60787225 tempest-ServerActionsTestOtherA-60787225-project-member] Lock "a31635f4-c7c8-4498-b825-b3a159400096" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.501322] env[68673]: DEBUG nova.network.neutron [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updated VIF entry in instance network info cache for port af467d78-9416-4fdb-ad77-57ed531d324f. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 892.504652] env[68673]: DEBUG nova.network.neutron [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updating instance_info_cache with network_info: [{"id": "af467d78-9416-4fdb-ad77-57ed531d324f", "address": "fa:16:3e:0a:b9:48", "network": {"id": "3b64f0e6-4614-4893-a56b-f57efcbc70ff", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1578823100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d63c38b685994e44b17d74be2c8bbe04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf467d78-94", "ovs_interfaceid": "af467d78-9416-4fdb-ad77-57ed531d324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.519777] env[68673]: DEBUG oslo_concurrency.lockutils [req-e4ac0957-7b83-4c90-bcfd-f9f4402a7070 req-4115ed4f-87a0-493e-99b7-7637a98448fd service nova] Releasing lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.711987] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51f0beb-be7e-4179-bacc-4798cb48b661 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] Acquiring lock "a4561267-9893-4c7f-b3cb-6887cf740cd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.712284] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51f0beb-be7e-4179-bacc-4798cb48b661 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] Lock "a4561267-9893-4c7f-b3cb-6887cf740cd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.541950] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ac98c201-1735-476f-adbf-6e0384fe3ab8 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] Acquiring lock "84ce4cab-05fb-46fe-b59f-c2851812c5dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.543454] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ac98c201-1735-476f-adbf-6e0384fe3ab8 tempest-ServerRescueNegativeTestJSON-112736049 
tempest-ServerRescueNegativeTestJSON-112736049-project-member] Lock "84ce4cab-05fb-46fe-b59f-c2851812c5dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.779329] env[68673]: DEBUG oslo_concurrency.lockutils [None req-01d2278e-6966-46e8-ac6b-104a93cbf7af tempest-ServersListShow296Test-559948396 tempest-ServersListShow296Test-559948396-project-member] Acquiring lock "c86a2c4a-17c4-48da-aafc-ff2aa2f3699a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.779603] env[68673]: DEBUG oslo_concurrency.lockutils [None req-01d2278e-6966-46e8-ac6b-104a93cbf7af tempest-ServersListShow296Test-559948396 tempest-ServersListShow296Test-559948396-project-member] Lock "c86a2c4a-17c4-48da-aafc-ff2aa2f3699a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.626651] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ebe537e1-77cc-4913-8df9-e6c81bf13853 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "11d7e9e7-66ae-4e0c-abad-9542d6716ba6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.626651] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ebe537e1-77cc-4913-8df9-e6c81bf13853 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "11d7e9e7-66ae-4e0c-abad-9542d6716ba6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.171341] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1e7f9180-96d6-426d-8353-2b907a4a4b59 tempest-ServerActionsV293TestJSON-289299878 tempest-ServerActionsV293TestJSON-289299878-project-member] Acquiring lock "2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.171631] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1e7f9180-96d6-426d-8353-2b907a4a4b59 tempest-ServerActionsV293TestJSON-289299878 tempest-ServerActionsV293TestJSON-289299878-project-member] Lock "2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.732879] env[68673]: WARNING oslo_vmware.rw_handles [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 926.732879] 
env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 926.732879] env[68673]: ERROR oslo_vmware.rw_handles [ 926.733668] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 926.735071] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 926.735322] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Copying Virtual Disk [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/c2283cab-b906-48aa-b218-9f10daede991/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 926.735594] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-318157a4-269d-40df-9a5e-c12c137ceb71 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.744741] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for the task: (returnval){ [ 926.744741] env[68673]: value = "task-3433486" [ 926.744741] env[68673]: _type = "Task" [ 926.744741] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.753726] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Task: {'id': task-3433486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.259214] env[68673]: DEBUG oslo_vmware.exceptions [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 927.259278] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.260018] env[68673]: ERROR nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.260018] env[68673]: Faults: ['InvalidArgument'] [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Traceback (most recent call last): [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] yield resources [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self.driver.spawn(context, instance, image_meta, [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self._fetch_image_if_missing(context, vi) [ 927.260018] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] image_cache(vi, tmp_image_ds_loc) [ 927.260517] env[68673]: ERROR 
nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] vm_util.copy_virtual_disk( [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] session._wait_for_task(vmdk_copy_task) [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return self.wait_for_task(task_ref) [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return evt.wait() [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] result = hub.switch() [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 927.260517] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return self.greenlet.switch() [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self.f(*self.args, **self.kw) [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] raise exceptions.translate_fault(task_info.error) [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Faults: ['InvalidArgument'] [ 927.260983] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] [ 927.261601] env[68673]: INFO nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Terminating instance [ 927.263801] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.264093] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.264419] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f81087f-12e7-408d-a2ff-fbd262997bcc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.267672] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 927.267958] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.269717] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fc6e45-40e5-4514-8998-fe2bd040c68e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.279188] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 927.280521] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4aecfe3-5a68-4fe6-93be-542072453b37 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.282547] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.282741] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 927.283698] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c8b65d7-018e-4c35-8fee-7fecd07f0ab4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.290214] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for the task: (returnval){ [ 927.290214] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a4ab40-f30e-94c6-5cd9-29d7689924a6" [ 927.290214] env[68673]: _type = "Task" [ 927.290214] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.299088] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a4ab40-f30e-94c6-5cd9-29d7689924a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.365231] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 927.365546] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 927.365780] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Deleting the datastore file [datastore1] 02517d31-0830-4e75-bde3-5f2e939f1328 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.366114] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5917566-0e08-4bf1-9240-6ee8716cc9b9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.373329] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for the task: (returnval){ [ 927.373329] env[68673]: value = "task-3433488" [ 927.373329] env[68673]: _type = "Task" [ 927.373329] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.381814] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Task: {'id': task-3433488, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.801902] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 927.801902] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Creating directory with path [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.801902] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4df8e03d-6bc9-4137-a383-23b67fcd8816 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.813616] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Created directory with path [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.813865] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Fetch image to [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 927.815050] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 927.815050] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d9e295-7d10-41ed-9148-13b9afb8761b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.822020] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7886003-5899-4600-9ea8-3e49c11c2d30 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.832205] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65a9f63-596a-46bc-af33-ffeab8a7dfe1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.864516] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52320774-f54e-420e-82d1-e12468a03130 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.871501] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5072a97c-a61e-4c74-96dc-f85ff76fcb45 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.881208] env[68673]: DEBUG oslo_vmware.api [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Task: {'id': task-3433488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067051} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.881463] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.881645] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 927.881811] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 927.881995] env[68673]: INFO nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Took 0.61 seconds to destroy the instance on the hypervisor. 
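The failure recorded above, CopyVirtualDisk_Task rejected with "A specified parameter was not correct: fileType" and Faults: ['InvalidArgument'], surfaces through the oslo.vmware task poller named in both tracebacks (wait_for_task calling _poll_task, which raises exceptions.translate_fault(task_info.error)). As a reading aid, here is a minimal, self-contained sketch of that polling pattern in Python; TaskInfo, VimFaultException and every other name below are illustrative stand-ins, not the oslo.vmware source:

    import time
    from dataclasses import dataclass, field

    class VimFaultException(Exception):
        """Stand-in for the translated vCenter fault seen in the log."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    @dataclass
    class TaskInfo:
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error_msg: str = ''
        faults: list = field(default_factory=list)

    def wait_for_task(fetch_task_info, interval=0.5):
        """Poll a task until success; raise on the error state."""
        while True:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Mirrors exceptions.translate_fault(task_info.error) above.
                raise VimFaultException(info.faults, info.error_msg)
            time.sleep(interval)

    # Example: a CopyVirtualDisk-style task failing the same way as in the log.
    states = iter([
        TaskInfo('running', progress=0),
        TaskInfo('error',
                 error_msg='A specified parameter was not correct: fileType',
                 faults=['InvalidArgument']),
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except VimFaultException as exc:
        print(f"Faults: {exc.fault_list} :: {exc}")

Each pass of the loop corresponds to one "progress is N%" line in the log; the error branch is the step that turns the task's stored fault into the exception that aborts _fetch_image_if_missing.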
[ 927.884157] env[68673]: DEBUG nova.compute.claims [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 927.884350] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.884537] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.894382] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 928.031344] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 928.094349] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 928.094541] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 928.301689] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29161a0-3edc-4ef3-ac50-8001143e46b2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.309833] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdc571d-03d9-4bfa-8127-21bfe5fbae63 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.346190] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838a48e8-6501-4c0c-a9f4-9cb2bb2834a6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.355169] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05718c8a-8414-421b-a9e5-3d12b262d863 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.370542] env[68673]: DEBUG nova.compute.provider_tree [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.384058] env[68673]: DEBUG nova.scheduler.client.report [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 928.403585] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.519s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.404371] env[68673]: ERROR nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.404371] env[68673]: Faults: ['InvalidArgument'] [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Traceback (most recent call last): [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self.driver.spawn(context, instance, image_meta, [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self._fetch_image_if_missing(context, vi) [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] image_cache(vi, tmp_image_ds_loc) [ 928.404371] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] vm_util.copy_virtual_disk( [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] session._wait_for_task(vmdk_copy_task) [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return self.wait_for_task(task_ref) [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return evt.wait() [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] result = hub.switch() [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] return self.greenlet.switch() [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 928.404823] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] self.f(*self.args, **self.kw) [ 928.405242] env[68673]: ERROR nova.compute.manager [instance: 
02517d31-0830-4e75-bde3-5f2e939f1328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 928.405242] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] raise exceptions.translate_fault(task_info.error) [ 928.405242] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.405242] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Faults: ['InvalidArgument'] [ 928.405242] env[68673]: ERROR nova.compute.manager [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] [ 928.406243] env[68673]: DEBUG nova.compute.utils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 928.410505] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Build of instance 02517d31-0830-4e75-bde3-5f2e939f1328 was re-scheduled: A specified parameter was not correct: fileType [ 928.410505] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 928.410505] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 928.410505] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 928.410505] env[68673]: DEBUG nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 928.410740] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.814370] env[68673]: DEBUG nova.network.neutron [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.825756] env[68673]: INFO nova.compute.manager [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Took 0.42 seconds to deallocate network for instance. [ 928.926676] env[68673]: INFO nova.scheduler.client.report [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Deleted allocations for instance 02517d31-0830-4e75-bde3-5f2e939f1328 [ 928.952707] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7c4b0294-8e80-49cc-b1d3-5ff6e36e90c8 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "02517d31-0830-4e75-bde3-5f2e939f1328" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 300.562s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.954148] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "02517d31-0830-4e75-bde3-5f2e939f1328" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 102.446s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.954383] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Acquiring lock "02517d31-0830-4e75-bde3-5f2e939f1328-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.954591] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock
"02517d31-0830-4e75-bde3-5f2e939f1328-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.954758] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "02517d31-0830-4e75-bde3-5f2e939f1328-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.960946] env[68673]: INFO nova.compute.manager [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Terminating instance [ 928.962904] env[68673]: DEBUG nova.compute.manager [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 928.963115] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.963381] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4741778e-a2cb-4a27-979a-909ab31408dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.974018] env[68673]: DEBUG nova.compute.manager [None req-e5db8f92-35ea-4274-aacc-b554471cd536 tempest-ServersTestManualDisk-612812194 tempest-ServersTestManualDisk-612812194-project-member] [instance: ede1744b-c382-4b37-8884-11b73093f632] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 928.978892] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca303c82-7ddb-4ccf-85bf-ac0e6e26e14e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.001145] env[68673]: DEBUG nova.compute.manager [None req-e5db8f92-35ea-4274-aacc-b554471cd536 tempest-ServersTestManualDisk-612812194 tempest-ServersTestManualDisk-612812194-project-member] [instance: ede1744b-c382-4b37-8884-11b73093f632] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 929.014405] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02517d31-0830-4e75-bde3-5f2e939f1328 could not be found.
[ 929.014655] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 929.014836] env[68673]: INFO nova.compute.manager [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Took 0.05 seconds to destroy the instance on the hypervisor. [ 929.015091] env[68673]: DEBUG oslo.service.loopingcall [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.015529] env[68673]: DEBUG nova.compute.manager [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 929.015625] env[68673]: DEBUG nova.network.neutron [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.034280] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e5db8f92-35ea-4274-aacc-b554471cd536 tempest-ServersTestManualDisk-612812194 tempest-ServersTestManualDisk-612812194-project-member] Lock "ede1744b-c382-4b37-8884-11b73093f632" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 240.160s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.044645] env[68673]: DEBUG nova.compute.manager [None req-451309f2-c3cf-4a9b-aa50-2c018190fce0 tempest-ServersAdmin275Test-1470588584 tempest-ServersAdmin275Test-1470588584-project-member] [instance: 4c074098-9e15-495b-854a-109c8c5d9657] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 929.054823] env[68673]: DEBUG nova.network.neutron [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.067639] env[68673]: INFO nova.compute.manager [-] [instance: 02517d31-0830-4e75-bde3-5f2e939f1328] Took 0.05 seconds to deallocate network for instance. [ 929.072812] env[68673]: DEBUG nova.compute.manager [None req-451309f2-c3cf-4a9b-aa50-2c018190fce0 tempest-ServersAdmin275Test-1470588584 tempest-ServersAdmin275Test-1470588584-project-member] [instance: 4c074098-9e15-495b-854a-109c8c5d9657] Instance disappeared before build.
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 929.093396] env[68673]: DEBUG oslo_concurrency.lockutils [None req-451309f2-c3cf-4a9b-aa50-2c018190fce0 tempest-ServersAdmin275Test-1470588584 tempest-ServersAdmin275Test-1470588584-project-member] Lock "4c074098-9e15-495b-854a-109c8c5d9657" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.574s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.108628] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 929.160854] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ee31b823-7958-4c00-88d4-ab9f2dfca199 tempest-ServerRescueTestJSONUnderV235-1088322600 tempest-ServerRescueTestJSONUnderV235-1088322600-project-member] Lock "02517d31-0830-4e75-bde3-5f2e939f1328" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.207s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.166390] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.166761] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.168449] env[68673]: INFO nova.compute.claims [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.523017] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066e0ea1-a8c7-4e1a-b051-89d838adb901 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.532934] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0033fde0-7d9d-4fba-ab6d-cf5e9cebd23d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.564181] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b8b719-d524-43a3-9cc4-78a42f6b5c92 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.572045] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202ccf9e-a06e-400c-aff7-dfeaece66678 {{(pid=68673)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.585978] env[68673]: DEBUG nova.compute.provider_tree [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.594830] env[68673]: DEBUG nova.scheduler.client.report [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 929.609801] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.610156] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 929.646502] env[68673]: DEBUG nova.compute.utils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.647736] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 929.648474] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.657052] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Start building block device mappings for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 929.725528] env[68673]: DEBUG nova.policy [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10987b1d09db49219525f9846638c55e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaf47d7aa4b64fa58f8c1b3f1fcc02df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 929.728269] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 929.753243] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 929.753784] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 929.753784] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.753784] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 929.753939] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.754095] env[68673]: DEBUG nova.virt.hardware [None 
req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 929.754305] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 929.754463] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 929.754631] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 929.754793] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 929.754965] env[68673]: DEBUG nova.virt.hardware [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 929.756031] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e763f9-9b70-4122-bf43-c0050c1fd8f0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.764779] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a74e5c-768b-4903-9caa-9086cfa4c833 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.304738] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Successfully created port: 25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.339620] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Successfully updated port: 25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.350689] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 
tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.350829] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.350975] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.433581] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.478933] env[68673]: DEBUG nova.compute.manager [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Received event network-vif-plugged-25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 931.479170] env[68673]: DEBUG oslo_concurrency.lockutils [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] Acquiring lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.479367] env[68673]: DEBUG oslo_concurrency.lockutils [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.479526] env[68673]: DEBUG oslo_concurrency.lockutils [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.479686] env[68673]: DEBUG nova.compute.manager [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] No waiting events found dispatching network-vif-plugged-25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 931.479843] env[68673]: WARNING nova.compute.manager [req-08ebe76a-dab1-4c6d-8af8-4cc80bec9149 req-669803d1-a579-42d9-ba7e-32f0281f58e1 service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Received unexpected event
network-vif-plugged-25618527-2ec5-4bf7-b197-5b1583547ed0 for instance with vm_state building and task_state spawning. [ 932.007626] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Updating instance_info_cache with network_info: [{"id": "25618527-2ec5-4bf7-b197-5b1583547ed0", "address": "fa:16:3e:d1:82:1c", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25618527-2e", "ovs_interfaceid": "25618527-2ec5-4bf7-b197-5b1583547ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.023123] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.023431] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance network_info: |[{"id": "25618527-2ec5-4bf7-b197-5b1583547ed0", "address": "fa:16:3e:d1:82:1c", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25618527-2e", "ovs_interfaceid": "25618527-2ec5-4bf7-b197-5b1583547ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 932.023840] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:82:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25618527-2ec5-4bf7-b197-5b1583547ed0', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.034272] env[68673]: DEBUG oslo.service.loopingcall [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.034800] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 932.035038] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ffa39ec-02b6-45ee-a9ea-f1f769863e24 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.062561] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.062561] env[68673]: value = "task-3433489" [ 932.062561] env[68673]: _type = "Task" [ 932.062561] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.071328] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433489, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.575212] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433489, 'name': CreateVM_Task, 'duration_secs': 0.309765} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.575516] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 932.576615] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.576832] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.577744] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.578050] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62dcb8ad-5e59-4395-b102-840f7d19a22e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.584054] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 932.584054] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52497bfa-f766-6ac0-3672-c8bdddef9409" [ 932.584054] env[68673]: _type = "Task" [ 932.584054] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
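
NOTE: the wait_for_task/_poll_task pairs in the surrounding records (CreateVM_Task above, SearchDatastore_Task below) all follow one pattern: oslo.vmware submits an asynchronous vCenter task, then polls its state until it reports success or error. The repeated "progress is 0%." lines are individual polls. A minimal sketch of that loop, assuming a caller-supplied get_task_info helper that returns a dict; this is illustrative only, not the oslo.vmware source:

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Each iteration is one poll round-trip against vCenter; the
        # "Task: {...} progress is 0%." log lines come from these polls.
        while True:
            info = get_task_info(task_ref)
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # oslo.vmware translates the VIM fault into an exception here
                raise RuntimeError(info["error"])
            time.sleep(poll_interval)

[ 932.596493] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52497bfa-f766-6ac0-3672-c8bdddef9409, 'name': SearchDatastore_Task} progress is 0%.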
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.098402] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.099342] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.099611] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.481667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.481667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.052948] env[68673]: DEBUG oslo_concurrency.lockutils [None req-16562231-9fb8-4183-8eab-bfafc1c51be7 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "74928628-b30b-4e88-a2a4-82797d5c7965" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.053264] env[68673]: DEBUG oslo_concurrency.lockutils [None req-16562231-9fb8-4183-8eab-bfafc1c51be7 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "74928628-b30b-4e88-a2a4-82797d5c7965" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.059630] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83807532-cf16-4304-ba06-6cb8b55374c6 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Acquiring lock "43a38849-0ca1-4b73-b677-ca2baacff863" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.059960] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83807532-cf16-4304-ba06-6cb8b55374c6 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Lock "43a38849-0ca1-4b73-b677-ca2baacff863" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.094974] env[68673]: DEBUG nova.compute.manager [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Received event network-changed-25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 934.095178] env[68673]: DEBUG nova.compute.manager [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Refreshing instance network info cache due to event network-changed-25618527-2ec5-4bf7-b197-5b1583547ed0. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 934.095394] env[68673]: DEBUG oslo_concurrency.lockutils [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] Acquiring lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.095542] env[68673]: DEBUG oslo_concurrency.lockutils [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] Acquired lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.095697] env[68673]: DEBUG nova.network.neutron [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Refreshing network info cache for port 25618527-2ec5-4bf7-b197-5b1583547ed0 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 934.480239] env[68673]: DEBUG nova.network.neutron [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Updated VIF entry in instance network info cache for port 25618527-2ec5-4bf7-b197-5b1583547ed0. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 934.480602] env[68673]: DEBUG nova.network.neutron [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Updating instance_info_cache with network_info: [{"id": "25618527-2ec5-4bf7-b197-5b1583547ed0", "address": "fa:16:3e:d1:82:1c", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25618527-2e", "ovs_interfaceid": "25618527-2ec5-4bf7-b197-5b1583547ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.491840] env[68673]: DEBUG oslo_concurrency.lockutils [req-cf0a693a-3d3b-486c-9261-1fa6556c9454 req-5515684e-6ba7-4758-87d9-f4abdb13584f service nova] Releasing lock "refresh_cache-12c03ca5-3526-4ebe-84af-b2027a6e50ac" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.763044] env[68673]: DEBUG oslo_concurrency.lockutils [None req-561aa1bd-8b8a-4948-978f-1f004bf2c90b tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "f1f814f6-3e68-4729-8487-02e10c055cfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.763293] env[68673]: DEBUG oslo_concurrency.lockutils [None req-561aa1bd-8b8a-4948-978f-1f004bf2c90b tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "f1f814f6-3e68-4729-8487-02e10c055cfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.889443] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0182aa04-9446-4762-90b7-be01034a8379 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Acquiring lock "55fc09e9-581f-4ef2-a513-1b0c2f33dd75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.889443] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0182aa04-9446-4762-90b7-be01034a8379 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Lock 
"55fc09e9-581f-4ef2-a513-1b0c2f33dd75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.779749] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.774596] env[68673]: WARNING oslo_vmware.rw_handles [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 940.774596] env[68673]: ERROR oslo_vmware.rw_handles [ 940.775572] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore2 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 940.776695] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 940.777015] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Copying Virtual Disk [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore2] vmware_temp/a4ca1db7-194b-460b-99fd-43616ccef737/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 940.777378] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00a560bb-0d57-486f-adb6-5b72ebce12e4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.782772] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.782925] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 940.783115] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.785720] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for the task: (returnval){ [ 940.785720] env[68673]: value = "task-3433490" [ 940.785720] env[68673]: _type = "Task" [ 940.785720] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.796391] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Task: {'id': task-3433490, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.797853] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.798095] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.798269] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.798451] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 940.799475] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11483adb-e1de-42b4-8103-46b42d74d8b7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.806891] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d2a76-9446-4ddc-a25d-c2e824570187 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.820757] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1ca7bf-27de-4e4e-bf0c-2fa71b0b47c7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.827009] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebcbbff-9fa7-4e46-bce9-c62690cd8427 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.855767] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180845MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
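
NOTE: the update_available_resource periodic task above reads the hypervisor view (the four PropertyCollector.RetrievePropertiesEx calls), then mutates tracker state only while holding the named "compute_resources" lock; every Acquiring/acquired/"released" triple in these records is oslo.concurrency's bookkeeping. A minimal usage sketch of that locking pattern, with an illustrative stand-in body rather than Nova's real audit logic:

    from oslo_concurrency import lockutils  # requires oslo.concurrency

    @lockutils.synchronized("compute_resources")
    def update_available_resource(view):
        # Runs under the same exclusion that the 'Acquiring/acquired lock
        # "compute_resources"' records show; the audit and placement update
        # would happen here.
        view["audited"] = True
        return view

    print(update_available_resource({"free_vcpus": 48, "free_ram_mb": 180845}))

[ 940.855934] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.856147] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner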
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.932744] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 377657cd-9913-49ec-a0f8-a701655ff68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.933067] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.933305] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.933520] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.933728] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.933939] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.934167] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.934378] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.934701] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 50cc30fa-01f0-441d-af41-76c5273123af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.934930] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.946182] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.957829] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.967830] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.978034] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.987517] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 52738695-8a04-457a-a58e-46c214422409 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.998026] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.006989] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 78561ca7-b99f-414f-a8f9-1abf127ae3be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.016736] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 187f308b-b2b4-4b60-b490-71b8a74f916e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.026608] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a31635f4-c7c8-4498-b825-b3a159400096 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.035898] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a4561267-9893-4c7f-b3cb-6887cf740cd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.045562] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 84ce4cab-05fb-46fe-b59f-c2851812c5dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.055207] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c86a2c4a-17c4-48da-aafc-ff2aa2f3699a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.065231] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 11d7e9e7-66ae-4e0c-abad-9542d6716ba6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.075753] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.085654] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.095475] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 43a38849-0ca1-4b73-b677-ca2baacff863 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.105006] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 74928628-b30b-4e88-a2a4-82797d5c7965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.114920] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f814f6-3e68-4729-8487-02e10c055cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
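
NOTE: one more scheduled-instance record follows, and then the usage totals. Those totals ('total allocated vcpus: 10', used_ram=1792MB, used_disk=10GB) reconcile with the per-instance records earlier in this audit: only the ten instances logged as actively managed count toward usage (each {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}), the scheduled-but-not-yet-started allocations in this run of records are excluded, and the 512 MB MEMORY_MB reservation from the inventory data logged further below is added on top. As a reader's consistency check (plain arithmetic, not Nova code):

    managed = 10                   # instances reported 'actively managed' above
    per_instance = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}
    reserved_mb = 512              # MEMORY_MB 'reserved' in the inventory records below

    used = (managed * per_instance["VCPU"],
            managed * per_instance["MEMORY_MB"] + reserved_mb,
            managed * per_instance["DISK_GB"])
    assert used == (10, 1792, 10)  # matches the 'Final resource view' record

[ 941.124207] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 55fc09e9-581f-4ef2-a513-1b0c2f33dd75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.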
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 941.124435] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 941.124582] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 941.295876] env[68673]: DEBUG oslo_vmware.exceptions [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 941.296204] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.296749] env[68673]: ERROR nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.296749] env[68673]: Faults: ['InvalidArgument'] [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Traceback (most recent call last): [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] yield resources [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self.driver.spawn(context, instance, image_meta, [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self._fetch_image_if_missing(context, vi) [ 941.296749] env[68673]: ERROR nova.compute.manager [instance: 
50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] image_cache(vi, tmp_image_ds_loc) [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] vm_util.copy_virtual_disk( [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] session._wait_for_task(vmdk_copy_task) [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return self.wait_for_task(task_ref) [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return evt.wait() [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] result = hub.switch() [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 941.297152] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return self.greenlet.switch() [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self.f(*self.args, **self.kw) [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] raise exceptions.translate_fault(task_info.error) [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Faults: ['InvalidArgument'] [ 941.297723] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af]
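
NOTE: the spawn failed because vCenter rejected the CopyVirtualDisk call ('A specified parameter was not correct: fileType'). The earlier 'Fault InvalidArgument not matched' line is oslo.vmware's get_fault_class finding no specific exception class for that fault name, so the generic VimFaultException is raised instead. A simplified sketch of that dispatch; class and table names here are illustrative, not the library's internals:

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    # Known fault names map to specific exception classes; anything else
    # ('not matched') falls back to the generic wrapper above.
    FAULT_CLASSES = {"FileNotFound": FileNotFoundError}

    def translate_fault(name, message):
        cls = FAULT_CLASSES.get(name)
        if cls is None:
            return VimFaultException([name], message)
        return cls(message)

    err = translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")

[ 941.297723] env[68673]: INFO nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member]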
[instance: 50cc30fa-01f0-441d-af41-76c5273123af] Terminating instance [ 941.301301] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 941.301491] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.302287] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e226315-4b6f-47fb-9b21-4f62c9bfa1f6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.308974] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.309192] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faa6e39c-56b9-4066-b700-62b969741a49 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.377024] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.377246] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Deleting contents of the VM from datastore datastore2 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.377437] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Deleting the datastore file [datastore2] 50cc30fa-01f0-441d-af41-76c5273123af {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.379899] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcd258ce-bd28-4c56-93f1-6c2659e5301d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.385809] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for the task: (returnval){ [ 941.385809] env[68673]: value = "task-3433492" [ 941.385809] env[68673]: _type = "Task" [ 941.385809] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.393504] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Task: {'id': task-3433492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.486666] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d110a3c1-0ad6-4b5c-84c9-1200a787c2f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.494177] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c57836-25e3-42c5-9cf9-fe5ba23e0a86 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.524667] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1021be-3653-4805-a0a5-30605ee8990f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.531849] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1e7a8c-2287-4258-86a8-89ab994f6ac5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.544968] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.553432] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 941.567326] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 941.567466] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.711s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.895867] env[68673]: DEBUG oslo_vmware.api [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Task: {'id': task-3433492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081993} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.896184] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.896289] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Deleted contents of the VM from datastore datastore2 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.897036] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.897036] env[68673]: INFO nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Took 0.60 seconds to destroy the instance on the hypervisor. [ 941.898825] env[68673]: DEBUG nova.compute.claims [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 941.899009] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.899232] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.262199] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8aee378-39fb-424b-a11a-234880c6cfab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.270082] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4a2815-f428-49b3-90a6-1275fef3b729 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.300009] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e5f094-6b1d-4454-9abc-dc3dcc246a89 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
942.306818] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28af8ff-e839-4a11-8647-3ef6c864e3ba {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.320521] env[68673]: DEBUG nova.compute.provider_tree [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.328758] env[68673]: DEBUG nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 942.342130] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.443s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.342658] env[68673]: ERROR nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.342658] env[68673]: Faults: ['InvalidArgument'] [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Traceback (most recent call last): [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self.driver.spawn(context, instance, image_meta, [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self._fetch_image_if_missing(context, vi) [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] image_cache(vi, tmp_image_ds_loc) [ 942.342658] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] vm_util.copy_virtual_disk( [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] session._wait_for_task(vmdk_copy_task) [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return self.wait_for_task(task_ref) [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return evt.wait() [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] result = hub.switch() [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] return self.greenlet.switch() [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 942.343043] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] self.f(*self.args, **self.kw) [ 942.343397] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 942.343397] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] raise exceptions.translate_fault(task_info.error) [ 942.343397] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.343397] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Faults: ['InvalidArgument'] [ 942.343397] env[68673]: ERROR nova.compute.manager [instance: 50cc30fa-01f0-441d-af41-76c5273123af] [ 942.343397] env[68673]: DEBUG nova.compute.utils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 
50cc30fa-01f0-441d-af41-76c5273123af] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 942.344716] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Build of instance 50cc30fa-01f0-441d-af41-76c5273123af was re-scheduled: A specified parameter was not correct: fileType [ 942.344716] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 942.345097] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 942.345264] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 942.345428] env[68673]: DEBUG nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 942.345583] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.568109] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.727020] env[68673]: DEBUG nova.network.neutron [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.736469] env[68673]: INFO nova.compute.manager [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Took 0.39 seconds to deallocate network for instance. 
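The traceback above bottoms out in oslo_vmware/api.py:_poll_task raising exceptions.translate_fault(task_info.error). A minimal sketch of that single polling step, using only the oslo_vmware names that appear in the frames above (the loopingcall scheduling and eventlet plumbing are omitted; the state names follow the vSphere TaskInfo model):

    from oslo_vmware import exceptions

    def poll_task_once(task_info):
        # One tick of oslo_vmware.api.VMwareAPISession._poll_task: a finished
        # task yields its result; a failed task has its VIM fault (here:
        # InvalidArgument on 'fileType') translated into a Python exception
        # such as VimFaultException and raised to the caller.
        if task_info.state == 'success':
            return task_info.result
        if task_info.state == 'error':
            raise exceptions.translate_fault(task_info.error)
        # 'queued' / 'running': poll again on the next loopingcall tick.
        return None

wait_for_task() blocks on an eventlet Event that this loop fires, which is why the fault raised here surfaces through session._wait_for_task into vm_util.copy_virtual_disk and aborts the spawn, exactly as the frames above show; the compute manager then aborts the resource claim and re-schedules the build.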
[ 942.785901] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.850573] env[68673]: INFO nova.scheduler.client.report [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Deleted allocations for instance 50cc30fa-01f0-441d-af41-76c5273123af [ 942.873305] env[68673]: DEBUG oslo_concurrency.lockutils [None req-67eb5f5b-cbe9-4c94-8f9f-99fcbf808d7b tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "50cc30fa-01f0-441d-af41-76c5273123af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 256.825s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.874454] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "50cc30fa-01f0-441d-af41-76c5273123af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 58.200s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.874689] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock "50cc30fa-01f0-441d-af41-76c5273123af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.874895] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "50cc30fa-01f0-441d-af41-76c5273123af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.875096] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "50cc30fa-01f0-441d-af41-76c5273123af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.880201] env[68673]: INFO nova.compute.manager [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Terminating instance [ 942.881876] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquiring lock 
"refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.882570] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Acquired lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.882570] env[68673]: DEBUG nova.network.neutron [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 942.886700] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 942.930447] env[68673]: DEBUG nova.network.neutron [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.945038] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.945295] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.946849] env[68673]: INFO nova.compute.claims [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.132355] env[68673]: DEBUG nova.network.neutron [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.144789] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Releasing lock "refresh_cache-50cc30fa-01f0-441d-af41-76c5273123af" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.145125] env[68673]: DEBUG nova.compute.manager [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 943.145325] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 943.148361] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6da0054-5679-4953-bbcf-b1dbccec07c2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.159024] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38a39f4-046d-4e34-89d9-fe1d57870018 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.191243] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 50cc30fa-01f0-441d-af41-76c5273123af could not be found. [ 943.191491] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 943.191684] env[68673]: INFO nova.compute.manager [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Took 0.05 seconds to destroy the instance on the hypervisor. [ 943.191916] env[68673]: DEBUG oslo.service.loopingcall [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.194515] env[68673]: DEBUG nova.compute.manager [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 943.194619] env[68673]: DEBUG nova.network.neutron [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.214052] env[68673]: DEBUG nova.network.neutron [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 943.224967] env[68673]: DEBUG nova.network.neutron [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.235027] env[68673]: INFO nova.compute.manager [-] [instance: 50cc30fa-01f0-441d-af41-76c5273123af] Took 0.04 seconds to deallocate network for instance. [ 943.359748] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a70eae68-5c3e-46f6-a88f-e687d352ffd0 tempest-AttachInterfacesUnderV243Test-425728502 tempest-AttachInterfacesUnderV243Test-425728502-project-member] Lock "50cc30fa-01f0-441d-af41-76c5273123af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.485s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.407101] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7461e3a7-989b-479c-a748-8aaf74185186 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.415254] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280ef3c7-78f0-4edb-96d5-797e6b555d25 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.445365] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ad1379-cd18-4fd8-bc37-1067a5737e99 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.452827] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dd8e7b-2a6d-4e60-8cd1-d9f160cdae1c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.466072] env[68673]: DEBUG nova.compute.provider_tree [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.475598] env[68673]: DEBUG nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 943.493597] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.548s {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.494180] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 943.538087] env[68673]: DEBUG nova.compute.utils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.539432] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 943.539681] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.550621] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 943.631818] env[68673]: DEBUG nova.policy [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a38ca298f386427a86500d64aff00662', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a737922374e84c79992d4900249a1023', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 943.636961] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 943.666346] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.666596] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.666755] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.666934] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.667090] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.667237] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.667441] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.667589] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.667750] env[68673]: DEBUG nova.virt.hardware [None 
req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.667912] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.668185] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.669129] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48497116-0063-4b76-8e5f-63b460ffa8c9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.678026] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28cea0b-36ca-4629-a171-3a93b9283a51 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.785188] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.785188] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 943.785188] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 943.816706] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816706] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816706] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816706] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816706] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816906] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816906] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816906] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816906] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.816906] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 943.817103] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 943.819018] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.819018] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.330189] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Successfully created port: af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.785794] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.230524] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Successfully updated port: af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.247255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.247255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.247255] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.296568] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.301831] env[68673]: DEBUG nova.compute.manager [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Received event network-vif-plugged-af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 945.302082] env[68673]: DEBUG oslo_concurrency.lockutils [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] Acquiring lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.302307] env[68673]: DEBUG oslo_concurrency.lockutils [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.302493] env[68673]: DEBUG oslo_concurrency.lockutils [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.302661] env[68673]: DEBUG nova.compute.manager [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] No waiting events found dispatching network-vif-plugged-af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 945.302833] env[68673]: WARNING nova.compute.manager [req-f653100f-157d-4242-9d6a-99798ffac5dd req-c99622b2-0b5a-4950-8a31-8315951a572c service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Received unexpected event network-vif-plugged-af5399f4-90dc-4803-b28e-9f4a7ef5abba for instance with vm_state building and task_state spawning. 
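The lock trio above ("Acquiring lock ... acquired ... released" around 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events) is oslo.concurrency serializing the event pop: no waiter was registered for network-vif-plugged while the instance was still building, so the event is logged as unexpected and dropped. A minimal sketch of that locking pattern, assuming only lockutils.synchronized from oslo.concurrency's public API; the waiter registry below is illustrative, not Nova's actual InstanceEvents structure:

    from oslo_concurrency import lockutils

    # Hypothetical per-instance waiter registry; only the locking mirrors
    # the InstanceEvents.pop_instance_event lines in the log above.
    _waiters = {}

    def pop_instance_event(instance_uuid, event_name):
        # synchronized() wraps _pop_event in the lockutils `inner` wrapper
        # that emits the acquire/release DEBUG lines seen above.
        @lockutils.synchronized('%s-events' % instance_uuid)
        def _pop_event():
            return _waiters.get(instance_uuid, {}).pop(event_name, None)

        waiter = _pop_event()
        if waiter is None:
            # Nothing waiting yet (vm_state building, task_state spawning):
            # the manager logs the "Received unexpected event" WARNING.
            return None
        waiter.send(event_name)  # wake the greenthread blocked on the event
        return waiter

The same decorator-based pattern produces every other acquire/release pair in this log, including the "compute_resources" lock around instance_claim and the "refresh_cache-<uuid>" lock around network info cache rebuilds.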
[ 945.565833] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Updating instance_info_cache with network_info: [{"id": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "address": "fa:16:3e:19:de:41", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf5399f4-90", "ovs_interfaceid": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.581203] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.581522] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance network_info: |[{"id": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "address": "fa:16:3e:19:de:41", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf5399f4-90", "ovs_interfaceid": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 945.581945] env[68673]: DEBUG nova.virt.vmwareapi.vmops 
[None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:de:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af5399f4-90dc-4803-b28e-9f4a7ef5abba', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.590433] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating folder: Project (a737922374e84c79992d4900249a1023). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 945.591026] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5263b42-cbf5-4eac-b87d-fb30187b714c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.603045] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created folder: Project (a737922374e84c79992d4900249a1023) in parent group-v685311. [ 945.603262] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating folder: Instances. Parent ref: group-v685364. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 945.603569] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e53196db-c507-4d25-9ae3-b9d3795e7e3c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.613917] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created folder: Instances in parent group-v685364. [ 945.614181] env[68673]: DEBUG oslo.service.loopingcall [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.614374] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 945.614593] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-064e3e4d-03a8-4694-b05b-2fe80df89c8b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.633913] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.633913] env[68673]: value = "task-3433495" [ 945.633913] env[68673]: _type = "Task" [ 945.633913] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.641409] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433495, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.780553] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.145969] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433495, 'name': CreateVM_Task, 'duration_secs': 0.309548} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.146171] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 946.146843] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.147022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.148215] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 946.148494] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac8d0f47-47c8-4b4c-b3ae-5f1a68ffabc4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.154243] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 946.154243] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529152a1-9b25-5ecc-b25d-032c365f4ba4" [ 946.154243] env[68673]: _type = "Task" [ 946.154243] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.162629] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]529152a1-9b25-5ecc-b25d-032c365f4ba4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.666652] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.666979] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.668036] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.373147] env[68673]: DEBUG nova.compute.manager [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Received event network-changed-af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 947.373294] env[68673]: DEBUG nova.compute.manager [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Refreshing instance network info cache due to event network-changed-af5399f4-90dc-4803-b28e-9f4a7ef5abba. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 947.373512] env[68673]: DEBUG oslo_concurrency.lockutils [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] Acquiring lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.373664] env[68673]: DEBUG oslo_concurrency.lockutils [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] Acquired lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.373824] env[68673]: DEBUG nova.network.neutron [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Refreshing network info cache for port af5399f4-90dc-4803-b28e-9f4a7ef5abba {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.727835] env[68673]: DEBUG nova.network.neutron [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Updated VIF entry in instance network info cache for port af5399f4-90dc-4803-b28e-9f4a7ef5abba. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.728225] env[68673]: DEBUG nova.network.neutron [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Updating instance_info_cache with network_info: [{"id": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "address": "fa:16:3e:19:de:41", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf5399f4-90", "ovs_interfaceid": "af5399f4-90dc-4803-b28e-9f4a7ef5abba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.745489] env[68673]: DEBUG oslo_concurrency.lockutils [req-fcc6bbc9-ac99-4f4d-802f-76dfc6268099 req-f8b0b1e4-c7f3-452d-a683-f923000cbdbb service nova] Releasing lock "refresh_cache-04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.855438] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9dac3052-c4b1-44c9-9049-392057f8f2d9 tempest-AttachInterfacesV270Test-2053391187 tempest-AttachInterfacesV270Test-2053391187-project-member] Acquiring lock "7d00a8a9-3ddc-4555-9025-9d06479b34dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.855961] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9dac3052-c4b1-44c9-9049-392057f8f2d9 tempest-AttachInterfacesV270Test-2053391187 tempest-AttachInterfacesV270Test-2053391187-project-member] Lock "7d00a8a9-3ddc-4555-9025-9d06479b34dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.286337] env[68673]: WARNING oslo_vmware.rw_handles [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 
976.286337] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 976.286337] env[68673]: ERROR oslo_vmware.rw_handles [ 976.287029] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 976.288820] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 976.289092] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Copying Virtual Disk [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/00f072ab-080f-4c74-a95d-78b79d3dce7d/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 976.289381] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a4f2a7a-c623-4759-8996-303f686b4838 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.297895] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for the task: (returnval){ [ 976.297895] env[68673]: value = "task-3433496" [ 976.297895] env[68673]: _type = "Task" [ 976.297895] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.306496] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Task: {'id': task-3433496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.741764] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.808854] env[68673]: DEBUG oslo_vmware.exceptions [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 976.809156] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.809749] env[68673]: ERROR nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.809749] env[68673]: Faults: ['InvalidArgument'] [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Traceback (most recent call last): [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] yield resources [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self.driver.spawn(context, instance, image_meta, [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self._fetch_image_if_missing(context, vi) [ 976.809749] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] image_cache(vi, tmp_image_ds_loc) [ 976.810184] env[68673]: ERROR 
nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] vm_util.copy_virtual_disk( [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] session._wait_for_task(vmdk_copy_task) [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return self.wait_for_task(task_ref) [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return evt.wait() [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] result = hub.switch() [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 976.810184] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return self.greenlet.switch() [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self.f(*self.args, **self.kw) [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] raise exceptions.translate_fault(task_info.error) [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Faults: ['InvalidArgument'] [ 976.810609] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] [ 976.810609] env[68673]: INFO nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Terminating instance [ 976.811699] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.811913] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.812186] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-452ebad4-9309-45a8-af41-2febce807f83 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.815855] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 976.816056] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 976.816890] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0999fafa-a90e-4247-84a2-705565b78def {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.823763] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 976.824029] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39ac7a92-9271-408b-a39e-b73fb469b29c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.826277] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.826559] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 976.827641] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd20c37-426e-46a1-a5f3-0eff9393cb52 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.834247] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 976.834247] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524e9381-296d-13e8-d37e-758179ad2ed8" [ 976.834247] env[68673]: _type = "Task" [ 976.834247] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.847182] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524e9381-296d-13e8-d37e-758179ad2ed8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.894767] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 976.894997] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 976.895207] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Deleting the datastore file [datastore1] 377657cd-9913-49ec-a0f8-a701655ff68d {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.895472] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c82dd35-8a12-453a-a4fc-9e7a4cf6c17e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.901519] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for the task: (returnval){ [ 976.901519] env[68673]: value = "task-3433498" [ 976.901519] env[68673]: _type = "Task" [ 976.901519] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.909963] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Task: {'id': task-3433498, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.347032] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 977.347379] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating directory with path [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.347573] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8cb24e7-de69-4132-a144-eb8605db3ffd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.360025] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created directory with path [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.360025] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Fetch image to [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 977.360025] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 977.360641] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad93bba-c606-4c8c-adc7-65f514114f94 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.367318] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1dcdbb-28b4-4cea-bd5f-67431bcc4a0e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.376250] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c504d7-c78c-40e7-b75d-a98ddc605103 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.409031] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c039497-a416-4e60-a622-956d6a4c0b90 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.415647] env[68673]: DEBUG oslo_vmware.api [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Task: {'id': task-3433498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078671} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.417048] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.417244] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 977.417417] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 977.417591] env[68673]: INFO nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 977.419343] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f00ada1e-91c0-4ed7-b2c5-bd558623c789 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.421257] env[68673]: DEBUG nova.compute.claims [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 977.421439] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.421648] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.442619] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 977.501749] env[68673]: DEBUG oslo_vmware.rw_handles [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 977.562299] env[68673]: DEBUG oslo_vmware.rw_handles [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 977.562490] env[68673]: DEBUG oslo_vmware.rw_handles [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 977.859840] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5caff097-5bf0-46b4-89d8-0580a379f0ca {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.867635] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb31ea2-d927-40f9-b2c7-82497bd5e6d9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.897794] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd07d55b-90a8-46ce-abac-bdf938f3e57a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.904767] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35252816-2ba9-43b6-ac39-d7e1fa04b757 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.917401] env[68673]: DEBUG nova.compute.provider_tree [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.926208] env[68673]: DEBUG nova.scheduler.client.report [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 977.941961] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.520s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.942503] env[68673]: ERROR nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 977.942503] env[68673]: Faults: ['InvalidArgument'] [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Traceback (most recent call last): [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 
377657cd-9913-49ec-a0f8-a701655ff68d] self.driver.spawn(context, instance, image_meta, [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self._fetch_image_if_missing(context, vi) [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] image_cache(vi, tmp_image_ds_loc) [ 977.942503] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] vm_util.copy_virtual_disk( [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] session._wait_for_task(vmdk_copy_task) [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return self.wait_for_task(task_ref) [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return evt.wait() [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] result = hub.switch() [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] return self.greenlet.switch() [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 977.942890] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] self.f(*self.args, **self.kw) [ 977.943335] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 977.943335] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] raise exceptions.translate_fault(task_info.error) [ 977.943335] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 977.943335] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Faults: ['InvalidArgument'] [ 977.943335] env[68673]: ERROR nova.compute.manager [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] [ 977.943335] env[68673]: DEBUG nova.compute.utils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 977.944617] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Build of instance 377657cd-9913-49ec-a0f8-a701655ff68d was re-scheduled: A specified parameter was not correct: fileType [ 977.944617] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 977.945018] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 977.945191] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 977.945348] env[68673]: DEBUG nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 977.945511] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.305790] env[68673]: DEBUG nova.network.neutron [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.319288] env[68673]: INFO nova.compute.manager [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Took 0.37 seconds to deallocate network for instance. [ 978.413446] env[68673]: INFO nova.scheduler.client.report [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Deleted allocations for instance 377657cd-9913-49ec-a0f8-a701655ff68d [ 978.439587] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8249298f-dff8-4fb9-9c62-dacd410528cb tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 340.243s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.440770] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 141.512s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.441569] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Acquiring lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.441569] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.441569] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.444935] env[68673]: INFO nova.compute.manager [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Terminating instance [ 978.447075] env[68673]: DEBUG nova.compute.manager [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 978.447277] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.447678] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c1aaff5-e214-4dcc-96a8-06ffb41607b5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.453441] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 978.460010] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994ffb1c-b71c-4ce9-b98e-22ac6726aa55 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.491945] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 377657cd-9913-49ec-a0f8-a701655ff68d could not be found. [ 978.492160] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 978.492353] env[68673]: INFO nova.compute.manager [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 978.492583] env[68673]: DEBUG oslo.service.loopingcall [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.495076] env[68673]: DEBUG nova.compute.manager [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 978.495179] env[68673]: DEBUG nova.network.neutron [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.510486] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.510725] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.512220] env[68673]: INFO nova.compute.claims [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.531670] env[68673]: DEBUG nova.network.neutron [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.550365] env[68673]: INFO nova.compute.manager [-] [instance: 377657cd-9913-49ec-a0f8-a701655ff68d] Took 0.05 seconds to deallocate network for instance. 
[ 978.659106] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8cc36d06-a83e-4d87-b759-3b652b406846 tempest-TenantUsagesTestJSON-436655659 tempest-TenantUsagesTestJSON-436655659-project-member] Lock "377657cd-9913-49ec-a0f8-a701655ff68d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.218s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.926155] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a24412-8878-4d1e-bdd8-333635a4ab6a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.933814] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6bdc1a-6ace-4e0d-86ea-ea54ebecb2c5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.966074] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cb117e-df67-4f27-9b7a-02136f5e1414 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.973051] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba32fb8b-29dc-4ab2-8ced-5574c5280e65 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.985879] env[68673]: DEBUG nova.compute.provider_tree [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.994890] env[68673]: DEBUG nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 979.010203] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.499s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.010674] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 979.041073] env[68673]: DEBUG nova.compute.utils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.042497] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 979.042665] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 979.051088] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 979.106441] env[68673]: DEBUG nova.policy [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a38ca298f386427a86500d64aff00662', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a737922374e84c79992d4900249a1023', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 979.113423] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 979.137926] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=<?>,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-15T11:24:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.138207] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.138370] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.138555] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.138705] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.138851] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.139068] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.139234] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.139404] env[68673]: DEBUG nova.virt.hardware [None 
req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.139568] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.139744] env[68673]: DEBUG nova.virt.hardware [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.140619] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451acb2b-63fb-4038-843c-ef424b0062b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.148726] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2cad76-e768-42b7-bd6f-3d0c7937534f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.511096] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Successfully created port: 98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.490422] env[68673]: DEBUG nova.compute.manager [req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Received event network-vif-plugged-98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 980.490671] env[68673]: DEBUG oslo_concurrency.lockutils [req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] Acquiring lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.490874] env[68673]: DEBUG oslo_concurrency.lockutils [req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] Lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.491057] env[68673]: DEBUG oslo_concurrency.lockutils [req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] Lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.491218] env[68673]: DEBUG nova.compute.manager 
[req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] No waiting events found dispatching network-vif-plugged-98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.491383] env[68673]: WARNING nova.compute.manager [req-73f1849d-5ff2-45f1-a002-ea9b350dde8b req-63310655-6757-4e1c-9bc9-1454b66be5f0 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Received unexpected event network-vif-plugged-98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 for instance with vm_state building and task_state spawning. [ 980.587876] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Successfully updated port: 98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.602174] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.602174] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.602174] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.677703] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.925081] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Updating instance_info_cache with network_info: [{"id": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "address": "fa:16:3e:34:53:a8", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d6e7ad-7c", "ovs_interfaceid": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.937515] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.937814] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance network_info: |[{"id": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "address": "fa:16:3e:34:53:a8", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d6e7ad-7c", "ovs_interfaceid": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 980.938261] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:53:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.946613] env[68673]: DEBUG oslo.service.loopingcall [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.947107] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 980.947347] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5eac1193-98b0-42d0-af00-ff625478a365 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.968964] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.968964] env[68673]: value = "task-3433499" [ 980.968964] env[68673]: _type = "Task" [ 980.968964] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.979436] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433499, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.480024] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433499, 'name': CreateVM_Task, 'duration_secs': 0.308507} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.480208] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 981.480856] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.481035] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.481412] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.481660] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a29bc20-1f0f-415c-b719-be0d3585e2c8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.486232] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 981.486232] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52f549cd-4dc9-cf6e-748d-e42b4fc9e4b8" [ 981.486232] env[68673]: _type = "Task" [ 981.486232] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.495058] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52f549cd-4dc9-cf6e-748d-e42b4fc9e4b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.996955] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.997270] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.997512] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.518850] env[68673]: DEBUG nova.compute.manager [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Received event network-changed-98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 982.519072] env[68673]: DEBUG nova.compute.manager [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Refreshing instance network info cache due to event network-changed-98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 982.519285] env[68673]: DEBUG oslo_concurrency.lockutils [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] Acquiring lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.519420] env[68673]: DEBUG oslo_concurrency.lockutils [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] Acquired lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.519577] env[68673]: DEBUG nova.network.neutron [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Refreshing network info cache for port 98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.892045] env[68673]: DEBUG nova.network.neutron [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Updated VIF entry in instance network info cache for port 98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 982.892426] env[68673]: DEBUG nova.network.neutron [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Updating instance_info_cache with network_info: [{"id": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "address": "fa:16:3e:34:53:a8", "network": {"id": "80c9ae43-9c33-4a89-9b2d-430afd6b467d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1931832163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a737922374e84c79992d4900249a1023", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d6e7ad-7c", "ovs_interfaceid": "98d6e7ad-7cb3-4249-8e48-0d3f17bdf2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.902847] env[68673]: DEBUG oslo_concurrency.lockutils [req-16e56409-312d-4bb7-b476-80c4af46ba5f req-4231a416-81cc-497b-a8cf-577cb56add39 service nova] Releasing lock "refresh_cache-f4e540de-0b46-424b-894d-8ec0416d9828" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.809022] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.782993] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.783305] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1000.783540] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.798114] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.798335] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.798499] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.798659] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1000.799731] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655d4fce-3d8c-4a78-b0fc-482f2498e5b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.808429] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b707339a-1626-4af2-810e-bbdf01582409 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.822131] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34dd422-3818-47e1-b8bd-31ed5e9c9316 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.828392] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d64067d-344a-464d-8f5c-62d2d261fdb2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.858931] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180854MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1000.859084] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1000.859337] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.931279] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.931435] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.931562] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.931685] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.931802] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.931918] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.932048] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.932167] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.932281] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.932393] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.942942] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.954338] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.964045] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 52738695-8a04-457a-a58e-46c214422409 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.973675] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.982641] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 78561ca7-b99f-414f-a8f9-1abf127ae3be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.991420] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 187f308b-b2b4-4b60-b490-71b8a74f916e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.000050] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a31635f4-c7c8-4498-b825-b3a159400096 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.008695] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a4561267-9893-4c7f-b3cb-6887cf740cd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.017235] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 84ce4cab-05fb-46fe-b59f-c2851812c5dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.025702] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c86a2c4a-17c4-48da-aafc-ff2aa2f3699a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.034881] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 11d7e9e7-66ae-4e0c-abad-9542d6716ba6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.044341] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.071734] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.082815] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 43a38849-0ca1-4b73-b677-ca2baacff863 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.093037] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 74928628-b30b-4e88-a2a4-82797d5c7965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.102852] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f814f6-3e68-4729-8487-02e10c055cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.112338] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 55fc09e9-581f-4ef2-a513-1b0c2f33dd75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.121738] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7d00a8a9-3ddc-4555-9025-9d06479b34dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.122029] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1001.122202] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1001.443720] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da89ebac-6dc3-4942-a32c-063cf12f958c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.451789] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3897e5ba-8abf-47bf-b809-31b83e431d31 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.482875] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067b23c6-664f-4e86-be92-6b10f1c61077 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.490128] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a4b95c-1a4e-4a54-86f2-185fbc5bd6b0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.503024] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.511624] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1001.527350] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1001.528123] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.668s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.528331] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1002.783404] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.783611] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.783880] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1003.783880] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1003.805954] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806130] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806263] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806391] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806512] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806631] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806749] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806866] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.806986] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.807178] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1003.807352] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1004.784195] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1005.783190] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1005.784269] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.307036] env[68673]: WARNING oslo_vmware.rw_handles [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1026.307036] env[68673]: ERROR oslo_vmware.rw_handles [ 1026.307036] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1026.309222] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1026.309537] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Copying Virtual Disk [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/8bf1b6fc-ff33-439c-bbcc-e32a799c324a/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1026.309854] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8fcf696-ae6e-4d49-a822-07b96fe33828 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.319582] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 1026.319582] env[68673]: value = "task-3433500" [ 1026.319582] env[68673]: _type = "Task" [ 1026.319582] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.327997] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': task-3433500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.830863] env[68673]: DEBUG oslo_vmware.exceptions [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1026.830863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.831198] env[68673]: ERROR nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.831198] env[68673]: Faults: ['InvalidArgument'] [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Traceback (most recent call last): [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] yield resources [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self.driver.spawn(context, instance, image_meta, [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self._fetch_image_if_missing(context, vi) [ 1026.831198] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] image_cache(vi, tmp_image_ds_loc) [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] vm_util.copy_virtual_disk( [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] session._wait_for_task(vmdk_copy_task) [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return self.wait_for_task(task_ref) [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return evt.wait() [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] result = hub.switch() [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1026.831501] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return self.greenlet.switch() [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self.f(*self.args, **self.kw) [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] raise exceptions.translate_fault(task_info.error) [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Faults: ['InvalidArgument'] [ 1026.831914] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] [ 1026.831914] env[68673]: INFO nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Terminating instance [ 1026.833234] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.833343] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.833517] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcc6520a-a07b-4a09-b123-eb3af8f17c78 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1026.835768] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1026.835955] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.836695] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630485de-6e5e-44eb-a48c-cf20ec711aa7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.844366] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.844940] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79a4b4fe-610e-435c-99c2-87870fbd7c8e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.847202] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.847395] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1026.848354] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-304c0b66-54d4-4889-b9f7-8dff24ef8c87 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.852989] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1026.852989] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5265f64e-4ca9-7fd4-8a7e-f75c04724e68" [ 1026.852989] env[68673]: _type = "Task" [ 1026.852989] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.860110] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5265f64e-4ca9-7fd4-8a7e-f75c04724e68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.251054] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1027.251254] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1027.251435] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleting the datastore file [datastore1] 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.251700] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71066ff3-be9e-4314-a8fb-3401b979ca2a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.258445] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 1027.258445] env[68673]: value = "task-3433502" [ 1027.258445] env[68673]: _type = "Task" [ 1027.258445] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.266685] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': task-3433502, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.363051] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1027.363051] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.363396] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-599d12f6-c6be-4e5a-a6a9-3e78af0ea774 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.373977] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.374185] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Fetch image to [datastore1] vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1027.374358] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1027.375104] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07197efc-7b03-45b0-998d-01b0b713e4b2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.381348] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d938bf69-64fc-455d-aef8-8e2d617baee9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.390327] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c5dbd5-daf0-4380-9e31-3deb5fae8c9e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.420992] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7676fa2f-b2ed-4732-9e52-055b2522bf89 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.426544] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8e79d5a0-7395-4c2d-8621-7ee58706b996 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.449237] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1027.501041] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1027.562797] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1027.563011] env[68673]: DEBUG oslo_vmware.rw_handles [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1027.769083] env[68673]: DEBUG oslo_vmware.api [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': task-3433502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083704} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.769407] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.769602] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1027.769779] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.769951] env[68673]: INFO nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Took 0.93 seconds to destroy the instance on the hypervisor. [ 1027.772114] env[68673]: DEBUG nova.compute.claims [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1027.772294] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.772503] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.114682] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0e3a53-5207-4e93-b2a8-ed10a0a525b5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.123620] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6f65bc-fa85-4eac-b0e2-d113d9f6f803 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.152904] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775148c7-ab1f-4507-a0c7-4dbb7718d580 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.159803] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-db9a3f22-8a9f-4e04-88c5-e2f94984863b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.172720] env[68673]: DEBUG nova.compute.provider_tree [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.183772] env[68673]: DEBUG nova.scheduler.client.report [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1028.199707] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.427s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.200247] env[68673]: ERROR nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.200247] env[68673]: Faults: ['InvalidArgument'] [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Traceback (most recent call last): [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self.driver.spawn(context, instance, image_meta, [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self._fetch_image_if_missing(context, vi) [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 
837911fc-a8f2-41f9-bc0b-a3af4f29bd07] image_cache(vi, tmp_image_ds_loc) [ 1028.200247] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] vm_util.copy_virtual_disk( [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] session._wait_for_task(vmdk_copy_task) [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return self.wait_for_task(task_ref) [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return evt.wait() [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] result = hub.switch() [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] return self.greenlet.switch() [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1028.200574] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] self.f(*self.args, **self.kw) [ 1028.200901] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1028.200901] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] raise exceptions.translate_fault(task_info.error) [ 1028.200901] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.200901] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Faults: ['InvalidArgument'] [ 1028.200901] env[68673]: ERROR nova.compute.manager [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] [ 1028.201042] env[68673]: DEBUG nova.compute.utils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1028.202417] env[68673]: DEBUG 
nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Build of instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 was re-scheduled: A specified parameter was not correct: fileType [ 1028.202417] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1028.202836] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1028.203020] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1028.203177] env[68673]: DEBUG nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1028.203343] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.525946] env[68673]: DEBUG nova.network.neutron [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.538583] env[68673]: INFO nova.compute.manager [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Took 0.34 seconds to deallocate network for instance. 
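[editor's note] The traceback above shows the failure path end-to-end: _fetch_image_if_missing -> _cache_sparse_image -> copy_virtual_disk -> session._wait_for_task, with the vCenter-side task fault ("A specified parameter was not correct: fileType", faults ['InvalidArgument']) re-raised on the compute node and aborting the build. A minimal sketch of that poll-and-translate pattern follows; this is not oslo.vmware's code, and the task-info attribute names are illustrative stand-ins for the TaskInfo fields the log prints.

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException: carries the
        # fault keys the log prints as "Faults: ['InvalidArgument']".
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        # poll_task_info is a hypothetical callable returning an object with
        # .state ('queued' | 'running' | 'success' | 'error'), .result and
        # .error, mirroring the vSphere TaskInfo fields visible in the log.
        while True:
            info = poll_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The log's "raise exceptions.translate_fault(task_info.error)"
                # corresponds to this branch: the InvalidArgument fault on the
                # 'fileType' parameter surfaces here inside the spawn path.
                raise VimFaultException(info.error.message, info.error.faults)
            time.sleep(interval)  # keep polling until the task settles

Because the fault is raised inside spawn, the compute manager aborts the resource claim and re-schedules the build, which is exactly the sequence the surrounding records show.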
[ 1028.648672] env[68673]: INFO nova.scheduler.client.report [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleted allocations for instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 [ 1028.678095] env[68673]: DEBUG oslo_concurrency.lockutils [None req-10040db6-39d8-4b6d-ba6e-993a7da33cae tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 390.287s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.679365] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 192.619s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.679587] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.679788] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.679949] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.688827] env[68673]: INFO nova.compute.manager [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Terminating instance [ 1028.690726] env[68673]: DEBUG nova.compute.manager [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1028.690918] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.691202] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccf2fceb-3149-47b5-99b8-fc69a9036ee2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.696753] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1028.704190] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad8ea80-01df-4637-b7f5-d5809de6d214 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.739051] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 837911fc-a8f2-41f9-bc0b-a3af4f29bd07 could not be found. [ 1028.739327] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1028.739461] env[68673]: INFO nova.compute.manager [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1028.739766] env[68673]: DEBUG oslo.service.loopingcall [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.740904] env[68673]: DEBUG nova.compute.manager [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1028.740904] env[68673]: DEBUG nova.network.neutron [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.757988] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.758272] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.760008] env[68673]: INFO nova.compute.claims [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.788863] env[68673]: DEBUG nova.network.neutron [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.804350] env[68673]: INFO nova.compute.manager [-] [instance: 837911fc-a8f2-41f9-bc0b-a3af4f29bd07] Took 0.06 seconds to deallocate network for instance. 
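[editor's note] The terminate path here is worth pausing on: the instance was already torn down during the failed build, so when do_terminate_instance runs, the backend lookup raises InstanceNotFound and the driver treats the destroy as already complete (the WARNING above) instead of erroring out. A hedged sketch of that idempotent-destroy shape, with illustrative names rather than Nova's actual driver API:

    class InstanceNotFound(Exception):
        pass

    def destroy(backend, instance_uuid, log):
        # Idempotent destroy: a missing VM counts as success, not failure, so
        # a terminate that races a failed build (as in the log) still completes.
        try:
            vm_ref = backend.find_vm_by_uuid(instance_uuid)  # hypothetical lookup
            backend.unregister_vm(vm_ref)
            backend.delete_datastore_contents(vm_ref)
        except InstanceNotFound:
            log.warning("Instance %s does not exist on backend; "
                        "treating destroy as already done", instance_uuid)
        log.info("Instance destroyed")

Network deallocation then runs the same deallocate_for_instance() path as a normal delete, which is why the cleanup records after this point mirror the earlier build-failure cleanup.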
[ 1028.903794] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5102ba08-8b59-4986-8133-3406e0d77b0b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "837911fc-a8f2-41f9-bc0b-a3af4f29bd07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.157501] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c7b0b6-20bb-4d4a-a99d-f8d88ba9af7b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.166261] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e319af92-585a-4911-adfe-9cef795bcd78 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.196679] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d43f2-0f2c-4349-abfd-29ae24be5801 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.204028] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aaa93ad-206f-4019-b859-920d6dbae122 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.217413] env[68673]: DEBUG nova.compute.provider_tree [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.225902] env[68673]: DEBUG nova.scheduler.client.report [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1029.241226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.483s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.241697] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1029.280997] env[68673]: DEBUG nova.compute.utils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.282290] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1029.282497] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.291313] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1029.360305] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1029.363357] env[68673]: DEBUG nova.policy [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f54ff55ee6444dcf962befffaf36f0aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9873b3438c234de88881f85a24c523cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1029.387722] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1029.387978] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1029.388150] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.388333] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1029.388492] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.388686] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1029.388908] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1029.389202] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1029.389404] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1029.389574] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1029.389748] env[68673]: DEBUG nova.virt.hardware [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.390704] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c063131-b99f-42fa-81ec-5c5ce0bcf40f {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.402834] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be37e33f-b603-4ac6-a08f-9969199e9022 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.798166] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Successfully created port: 76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.260593] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.260951] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.885214] env[68673]: DEBUG nova.compute.manager [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Received event network-vif-plugged-76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1030.885439] env[68673]: DEBUG oslo_concurrency.lockutils [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] Acquiring lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.885664] env[68673]: DEBUG oslo_concurrency.lockutils [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.885831] env[68673]: DEBUG oslo_concurrency.lockutils [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.886025] env[68673]: DEBUG nova.compute.manager [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] No waiting events found dispatching network-vif-plugged-76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1030.886480] env[68673]: WARNING nova.compute.manager [req-a0111467-c335-4228-8842-6da492e5be16 req-cff82925-d376-4190-ad02-e6eccb22f12f service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Received unexpected event network-vif-plugged-76f2405b-2112-4de7-b834-ac63cfb77c40 for instance with vm_state building and task_state spawning. [ 1030.889119] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Successfully updated port: 76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.901358] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.901510] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquired lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.901933] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1030.944302] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1031.138364] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Updating instance_info_cache with network_info: [{"id": "76f2405b-2112-4de7-b834-ac63cfb77c40", "address": "fa:16:3e:81:79:23", "network": {"id": "a913ae9c-7639-4fce-b2e6-5dd97c9c857d", "bridge": "br-int", "label": "tempest-ServersTestJSON-749273901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9873b3438c234de88881f85a24c523cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f2405b-21", "ovs_interfaceid": "76f2405b-2112-4de7-b834-ac63cfb77c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.152030] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Releasing lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.152334] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance network_info: |[{"id": "76f2405b-2112-4de7-b834-ac63cfb77c40", "address": "fa:16:3e:81:79:23", "network": {"id": "a913ae9c-7639-4fce-b2e6-5dd97c9c857d", "bridge": "br-int", "label": "tempest-ServersTestJSON-749273901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9873b3438c234de88881f85a24c523cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f2405b-21", "ovs_interfaceid": "76f2405b-2112-4de7-b834-ac63cfb77c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1031.152737] env[68673]: 
DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:79:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e9fa4744-8702-4973-b911-ee18192a3e4b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76f2405b-2112-4de7-b834-ac63cfb77c40', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1031.160651] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Creating folder: Project (9873b3438c234de88881f85a24c523cd). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1031.161193] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abeabd76-c820-4220-aff7-0de94c6b5177 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.171996] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Created folder: Project (9873b3438c234de88881f85a24c523cd) in parent group-v685311. [ 1031.172199] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Creating folder: Instances. Parent ref: group-v685368. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1031.172418] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5352e55-72e0-4c41-a04b-70266217155b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.182448] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Created folder: Instances in parent group-v685368. [ 1031.182675] env[68673]: DEBUG oslo.service.loopingcall [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.182864] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1031.183063] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41dd2848-6095-4cf6-afdd-426a3ecad69b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.201680] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.201680] env[68673]: value = "task-3433505" [ 1031.201680] env[68673]: _type = "Task" [ 1031.201680] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.210199] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433505, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.712168] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433505, 'name': CreateVM_Task, 'duration_secs': 0.361479} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.712442] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1031.713165] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.713303] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.713638] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1031.713881] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54e5c46e-41bb-4cd6-abd0-65d9d4aa91ba {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.718108] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Waiting for the task: (returnval){ [ 1031.718108] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520deba3-a30a-3f90-93ae-d946da25bf5f" [ 1031.718108] env[68673]: _type = "Task" [ 1031.718108] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.725307] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520deba3-a30a-3f90-93ae-d946da25bf5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.229044] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.229293] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.229502] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.263689] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.263929] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.912319] env[68673]: DEBUG nova.compute.manager [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Received event network-changed-76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1032.912518] env[68673]: DEBUG nova.compute.manager [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Refreshing instance network info cache due to event network-changed-76f2405b-2112-4de7-b834-ac63cfb77c40. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1032.912727] env[68673]: DEBUG oslo_concurrency.lockutils [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] Acquiring lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.912867] env[68673]: DEBUG oslo_concurrency.lockutils [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] Acquired lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.913040] env[68673]: DEBUG nova.network.neutron [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Refreshing network info cache for port 76f2405b-2112-4de7-b834-ac63cfb77c40 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1033.259389] env[68673]: DEBUG nova.network.neutron [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Updated VIF entry in instance network info cache for port 76f2405b-2112-4de7-b834-ac63cfb77c40. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1033.259698] env[68673]: DEBUG nova.network.neutron [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Updating instance_info_cache with network_info: [{"id": "76f2405b-2112-4de7-b834-ac63cfb77c40", "address": "fa:16:3e:81:79:23", "network": {"id": "a913ae9c-7639-4fce-b2e6-5dd97c9c857d", "bridge": "br-int", "label": "tempest-ServersTestJSON-749273901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9873b3438c234de88881f85a24c523cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f2405b-21", "ovs_interfaceid": "76f2405b-2112-4de7-b834-ac63cfb77c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.270236] env[68673]: DEBUG oslo_concurrency.lockutils [req-fa73a818-e8f8-428b-a612-22e932fa0419 req-cd179b5d-a74f-43b9-a6e0-e3732db7fbd3 service nova] Releasing lock "refresh_cache-4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.058733] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock 
"72c1282a-8a71-4952-a02a-b6dd45269488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.059481] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.892047] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.681241] env[68673]: DEBUG oslo_concurrency.lockutils [None req-24d09f2c-e046-43da-8899-12c3f3c3343b tempest-FloatingIPsAssociationNegativeTestJSON-319373223 tempest-FloatingIPsAssociationNegativeTestJSON-319373223-project-member] Acquiring lock "2c59faff-0fb2-452c-9b1f-3dde3767d699" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.681549] env[68673]: DEBUG oslo_concurrency.lockutils [None req-24d09f2c-e046-43da-8899-12c3f3c3343b tempest-FloatingIPsAssociationNegativeTestJSON-319373223 tempest-FloatingIPsAssociationNegativeTestJSON-319373223-project-member] Lock "2c59faff-0fb2-452c-9b1f-3dde3767d699" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.781444] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.783989] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.784251] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1061.784100] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1061.795226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.795455] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.795636] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.795783] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1061.796960] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd4de88-7788-4f2e-9069-db0c90727603 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.805441] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fd3109-6919-4e81-b85a-91c995b7e102 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.819383] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4823af85-3181-4de3-a895-9e494c8de4b3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.825482] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e1d51a-3adb-45a9-b4ea-2f080c81a938 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.854137] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180907MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1061.854292] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1061.854476] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.932277] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.932435] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.932559] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.932680] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.932795] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.932910] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.933035] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.933166] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.933302] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.933423] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.944720] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1061.954890] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 52738695-8a04-457a-a58e-46c214422409 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1061.965506] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1061.975358] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 78561ca7-b99f-414f-a8f9-1abf127ae3be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1061.985526] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 187f308b-b2b4-4b60-b490-71b8a74f916e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1061.994910] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a31635f4-c7c8-4498-b825-b3a159400096 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.004567] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance a4561267-9893-4c7f-b3cb-6887cf740cd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.015233] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 84ce4cab-05fb-46fe-b59f-c2851812c5dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.025240] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c86a2c4a-17c4-48da-aafc-ff2aa2f3699a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.034863] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 11d7e9e7-66ae-4e0c-abad-9542d6716ba6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.044628] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.054383] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.064452] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 43a38849-0ca1-4b73-b677-ca2baacff863 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.074226] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 74928628-b30b-4e88-a2a4-82797d5c7965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.084227] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f814f6-3e68-4729-8487-02e10c055cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.093957] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 55fc09e9-581f-4ef2-a513-1b0c2f33dd75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.104591] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7d00a8a9-3ddc-4555-9025-9d06479b34dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.114942] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.124793] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.135201] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.143277] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2c59faff-0fb2-452c-9b1f-3dde3767d699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.143522] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1062.143671] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1062.472347] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930bd081-b028-485d-8bdb-ec2cce490cb5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.479650] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b13dcb-02e6-4788-8ab4-0817177c2199 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.509650] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7e7a3a-d10b-4cdd-a7ce-1ea36a6abe4c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.516578] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f746a7-6b8d-4a24-953a-cac702536ef1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.529404] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.537466] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1062.552992] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1062.552992] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.698s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.554122] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.554365] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1064.554365] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1064.575381] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.575543] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.575673] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.575796] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.575920] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576217] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576409] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576540] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576666] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576784] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1064.576905] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1064.577566] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.783747] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.779307] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.804739] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.783878] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.784165] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.810745] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 
tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.881760] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "f4e540de-0b46-424b-894d-8ec0416d9828" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.605144] env[68673]: WARNING oslo_vmware.rw_handles [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.605144] env[68673]: ERROR oslo_vmware.rw_handles [ 1072.605957] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1072.607670] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1072.607956] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Copying Virtual Disk [datastore1] vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] 
vmware_temp/a929eee9-6b2f-4194-bb5c-788b64f6e05e/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1072.608616] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59b25530-a326-43e9-8cf2-380380de04bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.613030] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.617385] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1072.617385] env[68673]: value = "task-3433506" [ 1072.617385] env[68673]: _type = "Task" [ 1072.617385] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.625758] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.129671] env[68673]: DEBUG oslo_vmware.exceptions [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1073.130022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.130584] env[68673]: ERROR nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1073.130584] env[68673]: Faults: ['InvalidArgument'] [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Traceback (most recent call last): [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] yield resources [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self.driver.spawn(context, instance, image_meta, [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self._fetch_image_if_missing(context, vi) [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1073.130584] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] image_cache(vi, tmp_image_ds_loc) [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] vm_util.copy_virtual_disk( [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] session._wait_for_task(vmdk_copy_task) [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return self.wait_for_task(task_ref) [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return evt.wait() [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] result = hub.switch() [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return self.greenlet.switch() [ 1073.131325] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self.f(*self.args, **self.kw) [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] raise exceptions.translate_fault(task_info.error) [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Faults: ['InvalidArgument'] [ 1073.131933] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] [ 1073.131933] env[68673]: INFO nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Terminating instance [ 1073.132533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.132760] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.133711] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 
c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1073.133832] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1073.134677] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a691b82-a841-4f4e-abcc-2fa0dd7bebd3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.136740] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0dc706-3537-4935-be51-414482d3a079 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.143995] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1073.145029] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d923f62-7381-414f-9ab8-d0794b8f8f68 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.146439] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.146629] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1073.147290] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ca90e54-a072-454e-a058-50b5d83c6392 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.152459] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for the task: (returnval){ [ 1073.152459] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52be3be2-8315-bfbb-c542-ff5d7366268b" [ 1073.152459] env[68673]: _type = "Task" [ 1073.152459] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.159961] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52be3be2-8315-bfbb-c542-ff5d7366268b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.232153] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1073.232153] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1073.232153] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleting the datastore file [datastore1] c09c33d3-ae8a-4057-9f7d-6a4b4948423d {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.232397] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16465b3c-0839-4ee2-813a-5e11fef6c477 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.239041] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1073.239041] env[68673]: value = "task-3433508" [ 1073.239041] env[68673]: _type = "Task" [ 1073.239041] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.247492] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433508, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.662251] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1073.662551] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Creating directory with path [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.662797] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a3c7d55-d7e9-4ff3-963e-dfc9cec6a586 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.674460] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Created directory with path [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.674708] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Fetch image to [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1073.674926] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1073.675689] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1812a40-4579-4c48-b90a-63913bb755be {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.682172] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a499dd0-5e1f-4535-8aa3-6c2d41c128f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.691481] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04f1223-efed-4d3c-ba3b-e0fe3fdc3bbe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.726253] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a8aa39-e1d1-4e62-b144-706f7172c3b2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.732138] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1cc08434-fb44-4137-a99c-8a64b9a148bc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.747821] env[68673]: DEBUG oslo_vmware.api [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083369} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.748089] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.748334] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1073.748687] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1073.748894] env[68673]: INFO nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Took 0.62 seconds to destroy the instance on the hypervisor. 
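
The entries above capture one full failure cycle: the sparse-image copy (task-3433506) fails with VimFaultException "A specified parameter was not correct: fileType" (fault InvalidArgument), the half-built VM c09c33d3 is unregistered and its datastore files deleted (task-3433508), and a second request (req-095f7fcb) re-fetches the image to retry the cache. Underneath, this is the standard oslo.vmware pattern visible in the tracebacks: open a VMwareAPISession, invoke a vCenter task method, and wait_for_task() while _poll_task polls until the task either completes or its fault is translated into an exception. The following is a minimal sketch of that pattern, not Nova's actual code path; the vCenter host, credentials, and datastore paths are placeholders, not values from this log.

    # Sketch of the session -> task -> wait_for_task flow logged above.
    # Host, credentials, and disk paths are illustrative placeholders.
    from oslo_vmware import api, exceptions, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    # CopyVirtualDisk_Task resolves "[datastore1] ..." paths against a
    # datacenter reference, so look one up first.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'Datacenter', 1)
    dc_ref = result.objects[0].obj

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] vmware_temp/example/example.vmdk')

    try:
        session.wait_for_task(task)  # polls task.info, like _poll_task above
    except exceptions.VimFaultException as exc:
        # exc.fault_list holds the vSphere fault names, e.g. ['InvalidArgument'].
        # The "Fault InvalidArgument not matched" debug line above means
        # get_fault_class() found no registered exception subclass for that
        # name, so the generic VimFaultException carries the fault list instead.
        print(exc.fault_list, exc)

Because the InvalidArgument fault is raised by vCenter while validating the copy arguments (the "fileType" complaint), retrying the task cannot clear it; hence the manager aborts the claim and destroys the instance, as the surrounding entries show.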
[ 1073.751017] env[68673]: DEBUG nova.compute.claims [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1073.751410] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.751410] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.755539] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1073.815397] env[68673]: DEBUG oslo_vmware.rw_handles [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1073.876933] env[68673]: DEBUG oslo_vmware.rw_handles [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1073.877188] env[68673]: DEBUG oslo_vmware.rw_handles [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1074.210572] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2337f9ed-1d75-4815-bd6e-b077c9ef3fa6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.217907] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb0d581-96e1-4616-923f-55af0e6dd0b1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.249723] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b5274b-09da-4b40-9360-e94e9740f88d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.256385] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc73ccc-0743-4e29-9720-e4137c39d5df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.269831] env[68673]: DEBUG nova.compute.provider_tree [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.277550] env[68673]: DEBUG nova.scheduler.client.report [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1074.295510] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.544s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.296044] env[68673]: ERROR nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1074.296044] env[68673]: Faults: ['InvalidArgument'] [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Traceback (most recent call last): [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] 
self.driver.spawn(context, instance, image_meta, [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self._fetch_image_if_missing(context, vi) [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] image_cache(vi, tmp_image_ds_loc) [ 1074.296044] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] vm_util.copy_virtual_disk( [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] session._wait_for_task(vmdk_copy_task) [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return self.wait_for_task(task_ref) [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return evt.wait() [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] result = hub.switch() [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] return self.greenlet.switch() [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1074.296401] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] self.f(*self.args, **self.kw) [ 1074.296717] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1074.296717] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] raise exceptions.translate_fault(task_info.error) [ 1074.296717] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1074.296717] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Faults: ['InvalidArgument'] [ 1074.296717] env[68673]: ERROR nova.compute.manager [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] [ 1074.296894] env[68673]: DEBUG nova.compute.utils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1074.298231] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Build of instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d was re-scheduled: A specified parameter was not correct: fileType [ 1074.298231] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1074.298601] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1074.298837] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1074.299018] env[68673]: DEBUG nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1074.299191] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.618016] env[68673]: DEBUG nova.network.neutron [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.628430] env[68673]: INFO nova.compute.manager [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Took 0.33 seconds to deallocate network for instance. 
[ 1074.718474] env[68673]: INFO nova.scheduler.client.report [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted allocations for instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d [ 1074.738023] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8376a722-f195-4279-a429-fbb08f791af1 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 435.922s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.738821] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 239.169s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.739062] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.739270] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.739431] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.741377] env[68673]: INFO nova.compute.manager [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Terminating instance [ 1074.743265] env[68673]: DEBUG nova.compute.manager [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1074.743265] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1074.743607] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56080663-2948-4579-815c-f797d26caba8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.753157] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43916d3-9b9c-4dc8-b849-92242002f7d0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.763915] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1074.784256] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c09c33d3-ae8a-4057-9f7d-6a4b4948423d could not be found. [ 1074.784457] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1074.784632] env[68673]: INFO nova.compute.manager [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1074.784864] env[68673]: DEBUG oslo.service.loopingcall [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.785086] env[68673]: DEBUG nova.compute.manager [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1074.785179] env[68673]: DEBUG nova.network.neutron [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.807956] env[68673]: DEBUG nova.network.neutron [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.815137] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.815491] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.817111] env[68673]: INFO nova.compute.claims [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.821012] env[68673]: INFO nova.compute.manager [-] [instance: c09c33d3-ae8a-4057-9f7d-6a4b4948423d] Took 0.04 seconds to deallocate network for instance. 
[ 1074.940608] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ee2b07f-273f-443f-ae8f-6cd25014835e tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c09c33d3-ae8a-4057-9f7d-6a4b4948423d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.279149] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c121a3-84c1-464e-8732-a15ef63eb3a2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.286764] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba33d37-432d-4966-9471-9548e6efadc5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.315766] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e306dd-d14c-40f2-aa7c-f1075c9fe81a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.322970] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee1057f-1ff9-4d40-b5c6-9f373d295408 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.337575] env[68673]: DEBUG nova.compute.provider_tree [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.346107] env[68673]: DEBUG nova.scheduler.client.report [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1075.359781] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.544s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.360289] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1075.392677] env[68673]: DEBUG nova.compute.utils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.394151] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1075.394328] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1075.404395] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1075.450643] env[68673]: DEBUG nova.policy [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fa21131a12b499da25d3b560cf45dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '703b63ef1a6049a389f26eead64ab4d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1075.470768] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1075.500011] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.500286] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.500446] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.500629] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.500775] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.500973] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.501248] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.501406] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.501573] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.501737] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.502012] env[68673]: DEBUG nova.virt.hardware [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.503073] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187c3161-de83-4443-bf8a-22f925373776 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.510765] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8ec6d4-b4cf-4437-9370-997f3f966ef6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.848379] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Successfully created port: ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.580125] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Successfully updated port: ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.590940] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.590998] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.591174] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Building network info cache for instance {{(pid=68673) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.632165] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.661505] env[68673]: DEBUG nova.compute.manager [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Received event network-vif-plugged-ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1076.661505] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Acquiring lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.661505] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.661505] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.661919] env[68673]: DEBUG nova.compute.manager [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] No waiting events found dispatching network-vif-plugged-ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1076.661919] env[68673]: WARNING nova.compute.manager [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Received unexpected event network-vif-plugged-ef29c031-a134-40bd-90b2-d88a07c29174 for instance with vm_state building and task_state spawning. [ 1076.661919] env[68673]: DEBUG nova.compute.manager [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Received event network-changed-ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1076.661919] env[68673]: DEBUG nova.compute.manager [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Refreshing instance network info cache due to event network-changed-ef29c031-a134-40bd-90b2-d88a07c29174. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1076.662169] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Acquiring lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.850684] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Updating instance_info_cache with network_info: [{"id": "ef29c031-a134-40bd-90b2-d88a07c29174", "address": "fa:16:3e:34:16:3c", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef29c031-a1", "ovs_interfaceid": "ef29c031-a134-40bd-90b2-d88a07c29174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.863914] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.864233] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance network_info: |[{"id": "ef29c031-a134-40bd-90b2-d88a07c29174", "address": "fa:16:3e:34:16:3c", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", 
"segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef29c031-a1", "ovs_interfaceid": "ef29c031-a134-40bd-90b2-d88a07c29174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1076.864525] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Acquired lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.864698] env[68673]: DEBUG nova.network.neutron [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Refreshing network info cache for port ef29c031-a134-40bd-90b2-d88a07c29174 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1076.866132] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:16:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef29c031-a134-40bd-90b2-d88a07c29174', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.873999] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating folder: Project (703b63ef1a6049a389f26eead64ab4d6). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.874778] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-813c55da-0171-426f-b21f-099a3060f731 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.887830] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created folder: Project (703b63ef1a6049a389f26eead64ab4d6) in parent group-v685311. [ 1076.888018] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating folder: Instances. Parent ref: group-v685371. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.888245] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f7860d7-2f00-4863-ad01-fc7781e2d1a1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.896335] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created folder: Instances in parent group-v685371. [ 1076.896553] env[68673]: DEBUG oslo.service.loopingcall [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.896807] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.897013] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b58751c5-060e-4d65-89d0-5a080218ca95 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.916653] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.916653] env[68673]: value = "task-3433511" [ 1076.916653] env[68673]: _type = "Task" [ 1076.916653] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.924081] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433511, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.125512] env[68673]: DEBUG nova.network.neutron [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Updated VIF entry in instance network info cache for port ef29c031-a134-40bd-90b2-d88a07c29174. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1077.125865] env[68673]: DEBUG nova.network.neutron [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Updating instance_info_cache with network_info: [{"id": "ef29c031-a134-40bd-90b2-d88a07c29174", "address": "fa:16:3e:34:16:3c", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef29c031-a1", "ovs_interfaceid": "ef29c031-a134-40bd-90b2-d88a07c29174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.135563] env[68673]: DEBUG oslo_concurrency.lockutils [req-b54d8623-ad6a-4df7-9c3f-5c51c6c3fc89 req-43a678f4-f705-40c0-ab95-653c03a75eb9 service nova] Releasing lock "refresh_cache-94d40e8f-639a-4695-8d3d-1b0d81e29695" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.426297] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433511, 'name': CreateVM_Task, 'duration_secs': 0.278209} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.426469] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1077.427162] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.427331] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.427637] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1077.427882] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1a849b-1b2b-4290-88b5-413f8a4af55b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.432990] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){ [ 1077.432990] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521971fa-d00a-9392-3cc2-aadc9014ff93" [ 1077.432990] env[68673]: _type = "Task" [ 1077.432990] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.439989] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521971fa-d00a-9392-3cc2-aadc9014ff93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.942984] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.943297] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1077.943476] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.048428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.280477] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.280772] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.419716] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b49a18-1b59-4db2-84f9-e5e843d74d77 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Acquiring lock "8aed9c82-b082-4f49-acf7-015a7e78e452" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.420136] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b49a18-1b59-4db2-84f9-e5e843d74d77 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Lock "8aed9c82-b082-4f49-acf7-015a7e78e452" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.661301] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dee4ce23-921f-4cfc-bc18-ff9fe90850a8 tempest-ServerTagsTestJSON-238178112 tempest-ServerTagsTestJSON-238178112-project-member] Acquiring lock "af1da6ac-13b7-4271-b917-204dfe91cced" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.661638] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dee4ce23-921f-4cfc-bc18-ff9fe90850a8 tempest-ServerTagsTestJSON-238178112 tempest-ServerTagsTestJSON-238178112-project-member] Lock "af1da6ac-13b7-4271-b917-204dfe91cced" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.779382] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.783889] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.784178] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1123.784609] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.797100] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.797320] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.797482] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.797632] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1123.798882] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21acc941-9444-4307-9884-339410269cad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.807799] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940a5ba9-25a4-490f-b599-8af345ad9b1b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.812242] env[68673]: WARNING oslo_vmware.rw_handles [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1123.812242] env[68673]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1123.812242] env[68673]: ERROR oslo_vmware.rw_handles [ 1123.812655] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1123.814707] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1123.815087] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Copying Virtual Disk [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/2e197edd-4e23-4cce-8dd2-2abf57343f9b/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1123.815666] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23ff6815-ac40-464b-ab24-284e5474f5f2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.828566] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aab122b-e226-434d-8a1f-c1768461f2df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.830846] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for the task: (returnval){ [ 1123.830846] env[68673]: value = "task-3433512" [ 1123.830846] env[68673]: _type = "Task" [ 1123.830846] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.837592] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067cf031-15bc-4048-b2c5-ee4eecf9d3bd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.843514] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Task: {'id': task-3433512, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.871773] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180907MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1123.871773] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.871773] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.943325] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.943477] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.943603] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.943724] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.943841] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.943957] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.944083] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.944198] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.944307] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.944415] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.955832] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1123.966467] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 43a38849-0ca1-4b73-b677-ca2baacff863 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1123.976250] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 74928628-b30b-4e88-a2a4-82797d5c7965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1123.986272] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f814f6-3e68-4729-8487-02e10c055cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1123.997149] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 55fc09e9-581f-4ef2-a513-1b0c2f33dd75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.006321] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 7d00a8a9-3ddc-4555-9025-9d06479b34dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.015706] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.024852] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.034154] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.043614] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2c59faff-0fb2-452c-9b1f-3dde3767d699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.052582] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.062705] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 8aed9c82-b082-4f49-acf7-015a7e78e452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.073028] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance af1da6ac-13b7-4271-b917-204dfe91cced has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1124.073028] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1124.073028] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1124.328016] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19995bc-e113-4e88-b7bf-7c3e784638a2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.341936] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f411464-12bf-4164-b5ff-ef86d4bb8a4c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.344810] env[68673]: DEBUG oslo_vmware.exceptions [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Fault InvalidArgument not matched. 
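Two threads of this section converge here. First, the resource audit closes consistently: the ten actively managed instances listed above each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and together with the 512 MB that the MEMORY_MB inventory reserves (see the inventory entries further down), 10 x 128 MB + 512 MB accounts exactly for used_ram=1792MB, used_disk=10GB and used_vcpus=10 in the final view. Second, "Fault InvalidArgument not matched" is the copy task failing: get_fault_class found no specific exception class registered for the fault name, so the error surfaces as the generic VimFaultException in the traceback that follows. A loose sketch of that translation step (class and registry names are illustrative stand-ins, not the oslo_vmware source):

class VimFaultException(Exception):
    def __init__(self, fault_list: list[str], message: str):
        super().__init__(message)
        self.fault_list = fault_list

REGISTERED_FAULTS: dict[str, type[Exception]] = {}   # fault name -> specific class

def translate_fault(fault_name: str, message: str) -> Exception:
    cls = REGISTERED_FAULTS.get(fault_name)
    if cls is None:   # the "not matched" branch logged above
        return VimFaultException([fault_name], message)
    return cls(message)

exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(type(exc).__name__, exc.fault_list)   # VimFaultException ['InvalidArgument']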
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1124.345083] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.345621] env[68673]: ERROR nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1124.345621] env[68673]: Faults: ['InvalidArgument'] [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Traceback (most recent call last): [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] yield resources [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self.driver.spawn(context, instance, image_meta, [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self._fetch_image_if_missing(context, vi) [ 1124.345621] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] image_cache(vi, tmp_image_ds_loc) [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] vm_util.copy_virtual_disk( [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] session._wait_for_task(vmdk_copy_task) [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return self.wait_for_task(task_ref) [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return evt.wait() [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] result = hub.switch() [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1124.345939] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return self.greenlet.switch() [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self.f(*self.args, **self.kw) [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] raise exceptions.translate_fault(task_info.error) [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Faults: ['InvalidArgument'] [ 1124.346510] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] [ 1124.346510] env[68673]: INFO nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Terminating instance [ 1124.347604] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.347851] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.348423] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e 
tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1124.348607] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1124.348829] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15aec59d-77c2-4c31-99fd-f04e89b01f21 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.350992] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04642206-18dc-4b78-8f85-c4c55d8c60c9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.377501] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3485d26e-2fd2-4d10-be41-d9bbba57390d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.381421] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1124.381894] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50fcbad6-9bb2-4003-b5a6-97f64495da43 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.385438] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.385655] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Folder [datastore1] devstack-image-cache_base created. 
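Two requests are interleaved at this point: req-095f7fcb is unregistering and tearing down instance 31700289 after the failed spawn, while req-cd328cc7 holds the image-cache lock and is preparing a fresh fetch for instance d9b3a1bb. Both work against the same datastore layout: a shared per-image cache directory plus a per-request vmware_temp staging directory that the download lands in before being copied into place. A small illustration assembled from the identifiers in the surrounding entries (the helper itself is hypothetical):

IMAGE_CACHE_DIR = "devstack-image-cache_base"

def cache_paths(datastore: str, image_id: str, request_dir: str) -> dict[str, str]:
    cached = f"[{datastore}] {IMAGE_CACHE_DIR}/{image_id}/{image_id}.vmdk"
    staging = f"[{datastore}] vmware_temp/{request_dir}/{image_id}/tmp-sparse.vmdk"
    return {"cached_vmdk": cached, "download_target": staging}

paths = cache_paths("datastore1",
                    "7da4e48b-416f-425b-b73b-3305c69c87ef",
                    "462c9920-b86b-467c-9643-e6617d3b9520")
for name, path in paths.items():
    print(f"{name}: {path}")   # matches the paths in the log entries above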
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1124.386572] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1f4edfe-a178-4ca4-b79c-0d9f14775618 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.389565] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770a1d81-dd5e-4392-8a01-812767a9bfb4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.395581] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 1124.395581] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5254a868-5620-2087-09b5-fe03b9f95357" [ 1124.395581] env[68673]: _type = "Task" [ 1124.395581] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.403357] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.412265] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1124.412492] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating directory with path [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.413210] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1124.416014] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-991f278e-cd07-41a9-905b-b65fdf1b1ee5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.426992] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1124.426992] env[68673]: DEBUG oslo_concurrency.lockutils 
[None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.555s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.436991] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created directory with path [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.436991] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Fetch image to [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1124.436991] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1124.437648] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00d1469-e0e3-4d27-84f8-f423fffb1086 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.444504] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dc8088-edb7-40ce-b6ba-cc53a2772ab2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.454927] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88146947-75c6-4a42-87ec-040901ffae78 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.488082] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df692f5-0ab4-4eba-bc2c-b0ffcdee28c0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.494379] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cbc65b45-cf82-493a-a8a7-f0beead64533 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.513107] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 
1124.562697] env[68673]: DEBUG oslo_vmware.rw_handles [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1124.620278] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1124.620492] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1124.620669] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Deleting the datastore file [datastore1] 31700289-ac8c-47a9-b4e0-981b5c9df645 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.620930] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e345e18-0271-4276-921b-ec570764d4b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.624894] env[68673]: DEBUG oslo_vmware.rw_handles [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1124.625078] env[68673]: DEBUG oslo_vmware.rw_handles [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1124.627381] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for the task: (returnval){ [ 1124.627381] env[68673]: value = "task-3433514" [ 1124.627381] env[68673]: _type = "Task" [ 1124.627381] env[68673]: } to complete. 
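The write handle above addresses the datastore through the vCenter/ESX /folder HTTP interface: the file's datastore-relative path goes in the URL path, and the datacenter and datastore names travel as query parameters. A sketch of how such a URL is assembled from the pieces in the log (an illustrative helper, not the oslo_vmware implementation; the real connection also carries the generic service ticket acquired just before):

from urllib.parse import quote, urlencode

def datastore_folder_url(host: str, file_path: str, dc_path: str, ds_name: str) -> str:
    query = urlencode({"dcPath": dc_path, "dsName": ds_name})
    return f"https://{host}:443/folder/{quote(file_path)}?{query}"

print(datastore_folder_url(
    "esx7c2n1.openstack.eu-de-1.cloud.sap",
    "vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/"
    "7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk",
    dc_path="ha-datacenter",
    ds_name="datastore1",
))   # reproduces the URL in the rw_handles entry above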
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.635431] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Task: {'id': task-3433514, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.136780] env[68673]: DEBUG oslo_vmware.api [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Task: {'id': task-3433514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077252} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.137063] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.137220] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1125.137390] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1125.137561] env[68673]: INFO nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Took 0.79 seconds to destroy the instance on the hypervisor. 
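task-3433514 shows the whole task lifecycle in miniature: invoke the vCenter method to get a task reference, poll it ("progress is 0%"), and read the result ('duration_secs': 0.077252, completed successfully). A generic sketch of that wait loop, with an assumed TaskInfo shape (oslo_vmware drives the real loop with a looping call over the vSphere TaskInfo object):

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str              # "running" | "success" | "error"
    progress: int = 0
    error: str | None = None

def wait_for_task(poll, interval: float = 0.5) -> float:
    start = time.monotonic()
    while True:
        info = poll()
        if info.state == "success":
            return time.monotonic() - start   # like 'duration_secs' above
        if info.state == "error":
            raise RuntimeError(info.error)    # stands in for translate_fault()
        time.sleep(interval)

states = iter([TaskInfo("running"), TaskInfo("success", progress=100)])
print(f"duration_secs: {wait_for_task(lambda: next(states), interval=0.01):.6f}")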
[ 1125.139671] env[68673]: DEBUG nova.compute.claims [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1125.139838] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.140056] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.426132] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.426361] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1125.426485] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1125.431425] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9f7819-a7cb-40e4-a38d-1fc6ef01c2d5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.438728] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8fc50b-ff35-42e9-927c-92487fdfa270 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.446289] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.446441] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.446574] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.446699] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.446824] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.446939] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.447081] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.447205] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.447321] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1125.447439] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
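The heal task's walk above reduces to a simple filter: rebuild the candidate list, skip any instance still in the Building state (it has no network info worth refreshing yet), and report when nothing remains. A compact sketch with stand-in field names rather than Nova's instance objects:

from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    vm_state: str   # e.g. "building", "active"

def instances_to_heal(instances: list[Instance]) -> list[Instance]:
    heal = []
    for inst in instances:
        if inst.vm_state == "building":
            print(f"[instance: {inst.uuid}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        heal.append(inst)
    if not heal:
        print("Didn't find any instances for network info cache update.")
    return heal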
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1125.472432] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e7818c-5752-4d46-a142-fb573cb0b35f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.479918] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5082b93-e934-416c-855a-3a7439d03a49 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.492991] env[68673]: DEBUG nova.compute.provider_tree [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.500940] env[68673]: DEBUG nova.scheduler.client.report [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1125.515428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.375s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.516027] env[68673]: ERROR nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1125.516027] env[68673]: Faults: ['InvalidArgument'] [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Traceback (most recent call last): [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self.driver.spawn(context, instance, image_meta, [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1125.516027] env[68673]: ERROR 
nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self._fetch_image_if_missing(context, vi) [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] image_cache(vi, tmp_image_ds_loc) [ 1125.516027] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] vm_util.copy_virtual_disk( [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] session._wait_for_task(vmdk_copy_task) [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return self.wait_for_task(task_ref) [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return evt.wait() [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] result = hub.switch() [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] return self.greenlet.switch() [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1125.516499] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] self.f(*self.args, **self.kw) [ 1125.517011] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1125.517011] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] raise exceptions.translate_fault(task_info.error) [ 1125.517011] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1125.517011] env[68673]: ERROR nova.compute.manager [instance: 
31700289-ac8c-47a9-b4e0-981b5c9df645] Faults: ['InvalidArgument'] [ 1125.517011] env[68673]: ERROR nova.compute.manager [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] [ 1125.517011] env[68673]: DEBUG nova.compute.utils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1125.518296] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Build of instance 31700289-ac8c-47a9-b4e0-981b5c9df645 was re-scheduled: A specified parameter was not correct: fileType [ 1125.518296] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1125.518718] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1125.518925] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1125.519131] env[68673]: DEBUG nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1125.519841] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1125.783134] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.783347] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.839837] env[68673]: DEBUG nova.network.neutron [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Updating instance_info_cache with network_info: [] {{(pid=68673) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.850189] env[68673]: INFO nova.compute.manager [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Took 0.33 seconds to deallocate network for instance. [ 1125.949958] env[68673]: INFO nova.scheduler.client.report [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Deleted allocations for instance 31700289-ac8c-47a9-b4e0-981b5c9df645 [ 1125.974019] env[68673]: DEBUG oslo_concurrency.lockutils [None req-095f7fcb-e887-4f71-90ac-bc5c6d3fad0e tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 486.021s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.975174] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 288.463s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.975393] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Acquiring lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.975886] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.975886] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.980295] env[68673]: INFO nova.compute.manager [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Terminating instance [ 1125.981508] env[68673]: DEBUG nova.compute.manager [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 
tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1125.981726] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1125.982219] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e755e5a5-0683-4455-8af2-d0018b67df74 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.991466] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5948499-8945-4c66-a70a-518e48748be8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.003823] env[68673]: DEBUG nova.compute.manager [None req-8a5f305b-baaa-4f1e-9220-fce7022f08c1 tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] [instance: 52738695-8a04-457a-a58e-46c214422409] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.029245] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31700289-ac8c-47a9-b4e0-981b5c9df645 could not be found. [ 1126.029245] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1126.029245] env[68673]: INFO nova.compute.manager [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1126.029245] env[68673]: DEBUG oslo.service.loopingcall [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
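"Waiting for function ... _deallocate_network_with_retries to return" is the looping-call idiom from oslo.service: run a function on an interval, retrying transient failures, until it reports completion. A self-contained stand-in for that pattern (the real code uses oslo_service.loopingcall and Neutron calls; everything below is illustrative):

import time

def wait_for_function(fn, interval: float = 1.0, max_attempts: int = 60):
    for attempt in range(1, max_attempts + 1):
        try:
            return fn()   # a normal return ends the loop
        except Exception as exc:   # transient failure: log and retry
            print(f"attempt {attempt} failed: {exc}; retrying in {interval}s")
            time.sleep(interval)
    raise TimeoutError("function did not succeed within the retry budget")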
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1126.029245] env[68673]: DEBUG nova.compute.manager [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1126.029692] env[68673]: DEBUG nova.network.neutron [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1126.032892] env[68673]: DEBUG nova.compute.manager [None req-8a5f305b-baaa-4f1e-9220-fce7022f08c1 tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] [instance: 52738695-8a04-457a-a58e-46c214422409] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.055600] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a5f305b-baaa-4f1e-9220-fce7022f08c1 tempest-SecurityGroupsTestJSON-403175465 tempest-SecurityGroupsTestJSON-403175465-project-member] Lock "52738695-8a04-457a-a58e-46c214422409" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.515s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.063952] env[68673]: DEBUG nova.network.neutron [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.081155] env[68673]: DEBUG nova.compute.manager [None req-1d60da41-c424-42a2-9520-fe114ce2a154 tempest-ServersV294TestFqdnHostnames-1595699051 tempest-ServersV294TestFqdnHostnames-1595699051-project-member] [instance: 5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.115565] env[68673]: INFO nova.compute.manager [-] [instance: 31700289-ac8c-47a9-b4e0-981b5c9df645] Took 0.09 seconds to deallocate network for instance. [ 1126.116013] env[68673]: DEBUG nova.compute.manager [None req-1d60da41-c424-42a2-9520-fe114ce2a154 tempest-ServersV294TestFqdnHostnames-1595699051 tempest-ServersV294TestFqdnHostnames-1595699051-project-member] [instance: 5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.139638] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1d60da41-c424-42a2-9520-fe114ce2a154 tempest-ServersV294TestFqdnHostnames-1595699051 tempest-ServersV294TestFqdnHostnames-1595699051-project-member] Lock "5b2c34cf-302d-4898-b3f5-d0feb6c1bcc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.466s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.151190] env[68673]: DEBUG nova.compute.manager [None req-439aa039-c81a-4d3c-85d0-559c6e9472e7 tempest-ServersTestBootFromVolume-927148198 tempest-ServersTestBootFromVolume-927148198-project-member] [instance: 78561ca7-b99f-414f-a8f9-1abf127ae3be] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.177769] env[68673]: DEBUG nova.compute.manager [None req-439aa039-c81a-4d3c-85d0-559c6e9472e7 tempest-ServersTestBootFromVolume-927148198 tempest-ServersTestBootFromVolume-927148198-project-member] [instance: 78561ca7-b99f-414f-a8f9-1abf127ae3be] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.203668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-439aa039-c81a-4d3c-85d0-559c6e9472e7 tempest-ServersTestBootFromVolume-927148198 tempest-ServersTestBootFromVolume-927148198-project-member] Lock "78561ca7-b99f-414f-a8f9-1abf127ae3be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.573s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.217809] env[68673]: DEBUG nova.compute.manager [None req-1fdd4b34-903e-4539-962a-4b2c0e4f5810 tempest-ServerPasswordTestJSON-1615801894 tempest-ServerPasswordTestJSON-1615801894-project-member] [instance: 187f308b-b2b4-4b60-b490-71b8a74f916e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.220863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ef0b4880-9fec-4b23-8e86-87806a7f35e5 tempest-VolumesAssistedSnapshotsTest-1054580832 tempest-VolumesAssistedSnapshotsTest-1054580832-project-member] Lock "31700289-ac8c-47a9-b4e0-981b5c9df645" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.246s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.242136] env[68673]: DEBUG nova.compute.manager [None req-1fdd4b34-903e-4539-962a-4b2c0e4f5810 tempest-ServerPasswordTestJSON-1615801894 tempest-ServerPasswordTestJSON-1615801894-project-member] [instance: 187f308b-b2b4-4b60-b490-71b8a74f916e] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.261808] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1fdd4b34-903e-4539-962a-4b2c0e4f5810 tempest-ServerPasswordTestJSON-1615801894 tempest-ServerPasswordTestJSON-1615801894-project-member] Lock "187f308b-b2b4-4b60-b490-71b8a74f916e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.877s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.271649] env[68673]: DEBUG nova.compute.manager [None req-cd6356da-0f3e-4cc5-9794-3385db516be9 tempest-ServerActionsTestOtherA-60787225 tempest-ServerActionsTestOtherA-60787225-project-member] [instance: a31635f4-c7c8-4498-b825-b3a159400096] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.297158] env[68673]: DEBUG nova.compute.manager [None req-cd6356da-0f3e-4cc5-9794-3385db516be9 tempest-ServerActionsTestOtherA-60787225 tempest-ServerActionsTestOtherA-60787225-project-member] [instance: a31635f4-c7c8-4498-b825-b3a159400096] Instance disappeared before build. 
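
The lock records above show Nova's per-instance build lock: _locked_do_build_and_run_instance holds a lock named after the instance UUID, which is why the do_terminate_instance request for 31700289-ac8c-47a9-b4e0-981b5c9df645 had to wait 288.463s before it could proceed. A minimal sketch of that pattern, assuming a simplified build function (everything except lockutils is an illustrative placeholder, not Nova's actual code):

    from oslo_concurrency import lockutils

    def build_and_run_instance(instance):
        # One lock per instance UUID: build and terminate serialize on it,
        # which is why the terminate request in the log waited ~288s.
        @lockutils.synchronized(instance.uuid)
        def _locked_do_build_and_run_instance():
            do_build(instance)  # placeholder for the real build path
        _locked_do_build_and_run_instance()
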
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.318827] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd6356da-0f3e-4cc5-9794-3385db516be9 tempest-ServerActionsTestOtherA-60787225 tempest-ServerActionsTestOtherA-60787225-project-member] Lock "a31635f4-c7c8-4498-b825-b3a159400096" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.381s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.328473] env[68673]: DEBUG nova.compute.manager [None req-b51f0beb-be7e-4179-bacc-4798cb48b661 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] [instance: a4561267-9893-4c7f-b3cb-6887cf740cd2] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.352814] env[68673]: DEBUG nova.compute.manager [None req-b51f0beb-be7e-4179-bacc-4798cb48b661 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] [instance: a4561267-9893-4c7f-b3cb-6887cf740cd2] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.373522] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51f0beb-be7e-4179-bacc-4798cb48b661 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] Lock "a4561267-9893-4c7f-b3cb-6887cf740cd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.661s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.382482] env[68673]: DEBUG nova.compute.manager [None req-ac98c201-1735-476f-adbf-6e0384fe3ab8 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] [instance: 84ce4cab-05fb-46fe-b59f-c2851812c5dd] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.404029] env[68673]: DEBUG nova.compute.manager [None req-ac98c201-1735-476f-adbf-6e0384fe3ab8 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] [instance: 84ce4cab-05fb-46fe-b59f-c2851812c5dd] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.424491] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ac98c201-1735-476f-adbf-6e0384fe3ab8 tempest-ServerRescueNegativeTestJSON-112736049 tempest-ServerRescueNegativeTestJSON-112736049-project-member] Lock "84ce4cab-05fb-46fe-b59f-c2851812c5dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.882s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.433718] env[68673]: DEBUG nova.compute.manager [None req-01d2278e-6966-46e8-ac6b-104a93cbf7af tempest-ServersListShow296Test-559948396 tempest-ServersListShow296Test-559948396-project-member] [instance: c86a2c4a-17c4-48da-aafc-ff2aa2f3699a] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.456096] env[68673]: DEBUG nova.compute.manager [None req-01d2278e-6966-46e8-ac6b-104a93cbf7af tempest-ServersListShow296Test-559948396 tempest-ServersListShow296Test-559948396-project-member] [instance: c86a2c4a-17c4-48da-aafc-ff2aa2f3699a] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.477508] env[68673]: DEBUG oslo_concurrency.lockutils [None req-01d2278e-6966-46e8-ac6b-104a93cbf7af tempest-ServersListShow296Test-559948396 tempest-ServersListShow296Test-559948396-project-member] Lock "c86a2c4a-17c4-48da-aafc-ff2aa2f3699a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.698s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.486537] env[68673]: DEBUG nova.compute.manager [None req-ebe537e1-77cc-4913-8df9-e6c81bf13853 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 11d7e9e7-66ae-4e0c-abad-9542d6716ba6] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.512634] env[68673]: DEBUG nova.compute.manager [None req-ebe537e1-77cc-4913-8df9-e6c81bf13853 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 11d7e9e7-66ae-4e0c-abad-9542d6716ba6] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.538378] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ebe537e1-77cc-4913-8df9-e6c81bf13853 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "11d7e9e7-66ae-4e0c-abad-9542d6716ba6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.913s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.547760] env[68673]: DEBUG nova.compute.manager [None req-1e7f9180-96d6-426d-8353-2b907a4a4b59 tempest-ServerActionsV293TestJSON-289299878 tempest-ServerActionsV293TestJSON-289299878-project-member] [instance: 2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.572488] env[68673]: DEBUG nova.compute.manager [None req-1e7f9180-96d6-426d-8353-2b907a4a4b59 tempest-ServerActionsV293TestJSON-289299878 tempest-ServerActionsV293TestJSON-289299878-project-member] [instance: 2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e] Instance disappeared before build. 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1126.593346] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1e7f9180-96d6-426d-8353-2b907a4a4b59 tempest-ServerActionsV293TestJSON-289299878 tempest-ServerActionsV293TestJSON-289299878-project-member] Lock "2538fc39-0bdf-45d4-a0c2-5a0c61b6b39e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.422s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.602386] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1126.656022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.656233] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.657789] env[68673]: INFO nova.compute.claims [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.783599] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.783702] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.971344] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8b6412-6464-4b7d-a918-ae0401f7dee9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.979103] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce15605d-38d6-43cc-974e-31ce14c6d6d1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.008593] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d81e455-686f-4ace-ad8c-72ae6a0e518c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.015630] 
env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f5d561-093e-40ea-be31-34a3c41dc5ea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.029610] env[68673]: DEBUG nova.compute.provider_tree [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.038122] env[68673]: DEBUG nova.scheduler.client.report [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1127.052616] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.396s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.053090] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1127.085222] env[68673]: DEBUG nova.compute.utils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1127.085474] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Allocating IP information in the background. 
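
The inventory report above is what the "compute_resources" claim at 1126.656 is checked against. Usable capacity per resource class follows the placement convention (total - reserved) * allocation_ratio; a quick, runnable check with the numbers reported for this provider (illustrative snippet, not Nova code):

    # Inventory as reported for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement-style usable capacity: (total - reserved) * allocation_ratio.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
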
{{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1127.085663] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1127.097752] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1127.144950] env[68673]: DEBUG nova.policy [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '932d8564eb7e43b6934a06f0b4487187', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9be60de1317483ba97bffbbd2b52eae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1127.164203] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Start spawning the instance on the hypervisor. 
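
"Allocating IP information in the background" followed immediately by "Start building block device mappings" reflects Nova's concurrent build steps: port allocation runs in a green thread while block devices are prepared on the main path, and the result is joined before the driver needs it. A rough sketch of that shape (the helper names and arguments are placeholders):

    import eventlet

    # Port allocation proceeds in the background...
    nwinfo_thread = eventlet.spawn(allocate_for_instance, context, instance)
    # ...while block device mappings are built on the main path.
    block_device_info = build_block_device_mappings(context, instance)
    # The green thread is joined before the VM is actually spawned.
    network_info = nwinfo_thread.wait()
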
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1127.192021] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1127.192021] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1127.192021] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.192226] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1127.192226] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.192226] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1127.192226] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1127.192783] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1127.193142] 
env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1127.193468] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1127.196081] env[68673]: DEBUG nova.virt.hardware [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1127.196081] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42183ca5-9769-4959-ac0a-09c791af67b8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.203101] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c406ca63-8fa5-44fc-8497-12ecc726e73a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.742013] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Successfully created port: cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.313754] env[68673]: DEBUG nova.compute.manager [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Received event network-vif-plugged-cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1128.314035] env[68673]: DEBUG oslo_concurrency.lockutils [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] Acquiring lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.314171] env[68673]: DEBUG oslo_concurrency.lockutils [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.314333] env[68673]: DEBUG oslo_concurrency.lockutils [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1128.314492] env[68673]: DEBUG nova.compute.manager [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] No waiting events found dispatching network-vif-plugged-cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1128.314658] env[68673]: WARNING nova.compute.manager [req-e9a6cdcd-4aae-4ca0-b157-760a18d7c66a req-98460469-254b-4b35-affb-5ebc2e0be0b4 service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Received unexpected event network-vif-plugged-cbe51682-6976-4c56-97d2-2e59640321e7 for instance with vm_state building and task_state spawning. [ 1128.416631] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Successfully updated port: cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.427775] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.427928] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquired lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.428088] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1128.493499] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance cache missing network info. 
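
The WARNING at 1128.314658 is benign here: Neutron delivered network-vif-plugged before the compute manager registered a waiter for it, so pop_instance_event found nothing to dispatch. The mechanism is essentially an event table keyed by instance and event name; an illustrative reduction of that flow (only the names visible in the log are taken from Nova, the rest is a sketch):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}          # (instance_uuid, event_key) -> Event
            self._lock = threading.Lock()

        def prepare_for_event(self, uuid, key):
            # Called before starting the operation that triggers the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, key)] = ev
            return ev

        def pop_instance_event(self, uuid, key):
            with self._lock:
                ev = self._waiters.pop((uuid, key), None)
            if ev is None:
                # Matches "No waiting events found dispatching ..." above.
                print('Received unexpected event %s for %s' % (key, uuid))
            else:
                ev.set()
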
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1128.663167] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Updating instance_info_cache with network_info: [{"id": "cbe51682-6976-4c56-97d2-2e59640321e7", "address": "fa:16:3e:a8:66:d3", "network": {"id": "3299e0b0-0ed9-44f8-afd6-c4aca136b568", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1561179603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9be60de1317483ba97bffbbd2b52eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbe51682-69", "ovs_interfaceid": "cbe51682-6976-4c56-97d2-2e59640321e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.676904] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Releasing lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.677338] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance network_info: |[{"id": "cbe51682-6976-4c56-97d2-2e59640321e7", "address": "fa:16:3e:a8:66:d3", "network": {"id": "3299e0b0-0ed9-44f8-afd6-c4aca136b568", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1561179603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9be60de1317483ba97bffbbd2b52eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbe51682-69", "ovs_interfaceid": "cbe51682-6976-4c56-97d2-2e59640321e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1128.677650] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:66:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '140f4558-c11e-4af4-ab36-234e2d2f80a4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbe51682-6976-4c56-97d2-2e59640321e7', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.685090] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Creating folder: Project (a9be60de1317483ba97bffbbd2b52eae). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1128.685696] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86032e2d-490f-4701-ac72-d1d5d86ad386 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.695947] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Created folder: Project (a9be60de1317483ba97bffbbd2b52eae) in parent group-v685311. [ 1128.696145] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Creating folder: Instances. Parent ref: group-v685374. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1128.696365] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6cff804-fdcf-4b6e-97b7-1b7a7c134408 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.704627] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Created folder: Instances in parent group-v685374. [ 1128.704851] env[68673]: DEBUG oslo.service.loopingcall [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
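
The CreateVM_Task sequence that follows (task-3433517, polled from "progress is 0%" to completion in 0.284s) is the standard oslo.vmware invoke-then-wait pattern. A minimal sketch, assuming placeholder vCenter credentials and managed object references (folder_ref, config_spec, and pool_ref are not taken from this log):

    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5)
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)
    # Polls the task (the "progress is 0%" lines) and raises on a fault.
    session.wait_for_task(task)
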
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.705040] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1128.705234] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1aba865-cf34-475e-8261-f16c7e9678e1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.724889] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.724889] env[68673]: value = "task-3433517" [ 1128.724889] env[68673]: _type = "Task" [ 1128.724889] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.734460] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433517, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.783691] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.236171] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433517, 'name': CreateVM_Task, 'duration_secs': 0.284136} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.236392] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1129.237091] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.237267] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.237568] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1129.237826] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd08b682-e626-4a9a-ab5b-c16d73c943a4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.242288] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 
tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for the task: (returnval){ [ 1129.242288] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52828150-0ad7-e482-0ac8-d0687bcd2b6f" [ 1129.242288] env[68673]: _type = "Task" [ 1129.242288] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.250961] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52828150-0ad7-e482-0ac8-d0687bcd2b6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.752069] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.752393] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1129.752469] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.427401] env[68673]: DEBUG nova.compute.manager [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Received event network-changed-cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1130.427631] env[68673]: DEBUG nova.compute.manager [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Refreshing instance network info cache due to event network-changed-cbe51682-6976-4c56-97d2-2e59640321e7. 
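
The network-changed event received at 1130.427 triggers the cache refresh that follows, serialized on the same per-instance "refresh_cache-<uuid>" lock the build path held at 1128.427. A sketch of that pattern (the two helpers are placeholders named after the log's code paths):

    from oslo_concurrency import lockutils

    def refresh_instance_cache(context, instance):
        # Same lock name as in the records: "refresh_cache-<instance uuid>".
        with lockutils.lock('refresh_cache-%s' % instance.uuid):
            nw_info = get_instance_nw_info(context, instance)       # placeholder
            update_instance_cache_with_nw_info(context, instance, nw_info)
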
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1130.427846] env[68673]: DEBUG oslo_concurrency.lockutils [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] Acquiring lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.427986] env[68673]: DEBUG oslo_concurrency.lockutils [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] Acquired lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.428161] env[68673]: DEBUG nova.network.neutron [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Refreshing network info cache for port cbe51682-6976-4c56-97d2-2e59640321e7 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1130.737936] env[68673]: DEBUG nova.network.neutron [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Updated VIF entry in instance network info cache for port cbe51682-6976-4c56-97d2-2e59640321e7. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1130.738310] env[68673]: DEBUG nova.network.neutron [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Updating instance_info_cache with network_info: [{"id": "cbe51682-6976-4c56-97d2-2e59640321e7", "address": "fa:16:3e:a8:66:d3", "network": {"id": "3299e0b0-0ed9-44f8-afd6-c4aca136b568", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1561179603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9be60de1317483ba97bffbbd2b52eae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbe51682-69", "ovs_interfaceid": "cbe51682-6976-4c56-97d2-2e59640321e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.748313] env[68673]: DEBUG oslo_concurrency.lockutils [req-01dab4aa-3db9-4ea3-8328-25fe8b26171d req-586be427-0063-4e29-93c3-ea6fd9fba20b service nova] Releasing lock "refresh_cache-bfb20e23-e3fa-40b8-a114-222c148db6b0" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.010852] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 
tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.368284] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "c12e8044-6e77-44a6-866e-1036f69113a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.368598] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.922375] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b9f1d9b2-92a5-4382-ba41-252906d6a5ce tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "9bbfc4f2-cb23-4813-808d-ab03a97acfbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.922375] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b9f1d9b2-92a5-4382-ba41-252906d6a5ce tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "9bbfc4f2-cb23-4813-808d-ab03a97acfbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.653894] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5231cdf5-9940-486e-91ed-f99e4c3c08c9 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "42eae7a3-1757-40f6-a194-58a4eb1ee3a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.653894] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5231cdf5-9940-486e-91ed-f99e4c3c08c9 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "42eae7a3-1757-40f6-a194-58a4eb1ee3a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.824027] env[68673]: WARNING oslo_vmware.rw_handles [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1173.824027] 
env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1173.824027] env[68673]: ERROR oslo_vmware.rw_handles [ 1173.824470] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1173.826468] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1173.826772] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Copying Virtual Disk [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/462c9920-b86b-467c-9643-e6617d3b9520/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1173.827342] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53f831b3-5624-4a7b-9ccf-1fb57604125d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.836709] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 1173.836709] env[68673]: value = "task-3433518" [ 1173.836709] env[68673]: _type = "Task" [ 1173.836709] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.484366] env[68673]: DEBUG oslo_vmware.exceptions [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Fault InvalidArgument not matched. 
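
The traceback above (RemoteDisconnected while closing the image write handle) is followed by a failed CopyVirtualDisk_Task: vCenter rejects the copy with InvalidArgument on fileType, and "Fault InvalidArgument not matched" means oslo.vmware has no specific exception class registered for that fault, so it falls back to the generic VimFaultException seen in the next traceback. Illustrative handling under those assumptions (session and task names assumed):

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list is ['InvalidArgument'] in the failure below;
        # Nova reacts by aborting the spawn and destroying the instance.
        if 'InvalidArgument' in e.fault_list:
            raise
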
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1174.484790] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.485224] env[68673]: ERROR nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1174.485224] env[68673]: Faults: ['InvalidArgument'] [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Traceback (most recent call last): [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] yield resources [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self.driver.spawn(context, instance, image_meta, [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self._fetch_image_if_missing(context, vi) [ 1174.485224] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] image_cache(vi, tmp_image_ds_loc) [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] vm_util.copy_virtual_disk( [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] session._wait_for_task(vmdk_copy_task) [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return self.wait_for_task(task_ref) [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return evt.wait() [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] result = hub.switch() [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1174.485889] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return self.greenlet.switch() [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self.f(*self.args, **self.kw) [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] raise exceptions.translate_fault(task_info.error) [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Faults: ['InvalidArgument'] [ 1174.486645] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] [ 1174.486645] env[68673]: INFO nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Terminating instance [ 1174.487843] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1174.487942] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1174.488239] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.488431] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.489172] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8710d043-c14e-42b1-91fa-0efff95b505a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.491674] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-872d4921-afc3-4770-8efb-2a9359084bd3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.497363] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1174.497583] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b3a67e5-519b-4bff-b698-2ed8f2a9795c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.499660] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.499834] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1174.500761] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-433e58ee-1627-449c-a4d7-4649e7acf9d5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.505288] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for the task: (returnval){ [ 1174.505288] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52466183-2491-e3b7-3665-53f6939e44ad" [ 1174.505288] env[68673]: _type = "Task" [ 1174.505288] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.512808] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52466183-2491-e3b7-3665-53f6939e44ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.574632] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1174.574856] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1174.575104] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleting the datastore file [datastore1] d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1174.575313] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54eae776-0117-4bd0-ae27-bddb6d047726 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.581950] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 1174.581950] env[68673]: value = "task-3433520" [ 1174.581950] env[68673]: _type = "Task" [ 1174.581950] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.592375] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': task-3433520, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.017165] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1175.017165] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Creating directory with path [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.017165] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e84367ac-6a46-4a48-a11c-b2ee3ab76812 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.028944] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Created directory with path [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.029153] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Fetch image to [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1175.029327] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1175.030086] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d82c36e-dfb1-4b96-bbde-1896b176eab8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.036578] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3bf46b-ff5e-4bce-88d0-89f7b3d58b27 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.045307] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efe75ae-a3ac-4084-940f-af5ea5b3e814 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.076323] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fa3e0e-05d1-450e-bbe2-39394f6ada50 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.082041] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-787752c1-5707-4273-82c8-a2ae6a13a376 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.090556] env[68673]: DEBUG oslo_vmware.api [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': task-3433520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066214} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.090791] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1175.090968] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1175.091148] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1175.091318] env[68673]: INFO nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1175.093446] env[68673]: DEBUG nova.compute.claims [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1175.093611] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1175.093824] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1175.104593] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1175.158796] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1175.219998] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1175.220219] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1175.400938] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dcd4c6-fe13-464d-a1fb-49592aad3f9c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.407771] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafcfdbc-e359-4e47-8042-1e15ec93b978 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.436560] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca223791-ef3b-4715-9747-66a39dc7372b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.443813] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f2036c-4e64-403a-b55a-86b5b9c9e155 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.456663] env[68673]: DEBUG nova.compute.provider_tree [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1175.464680] env[68673]: DEBUG nova.scheduler.client.report [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1175.480998] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.387s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1175.481528] env[68673]: ERROR nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1175.481528] env[68673]: Faults: ['InvalidArgument']
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Traceback (most recent call last):
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self.driver.spawn(context, instance, image_meta,
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self._fetch_image_if_missing(context, vi)
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1175.481528] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] image_cache(vi, tmp_image_ds_loc)
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] vm_util.copy_virtual_disk(
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] session._wait_for_task(vmdk_copy_task)
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return self.wait_for_task(task_ref)
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return evt.wait()
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] result = hub.switch()
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] return self.greenlet.switch()
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1175.481948] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] self.f(*self.args, **self.kw)
[ 1175.482340] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1175.482340] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] raise exceptions.translate_fault(task_info.error)
[ 1175.482340] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1175.482340] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Faults: ['InvalidArgument']
[ 1175.482340] env[68673]: ERROR nova.compute.manager [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47]
[ 1175.482340] env[68673]: DEBUG nova.compute.utils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1175.483695] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Build of instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 was re-scheduled: A specified parameter was not correct: fileType
[ 1175.483695] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1175.484058] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1175.484261] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1175.484424] env[68673]: DEBUG nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1175.484664] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1175.759766] env[68673]: DEBUG nova.network.neutron [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1175.772810] env[68673]: INFO nova.compute.manager [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Took 0.29 seconds to deallocate network for instance.
[ 1175.872794] env[68673]: INFO nova.scheduler.client.report [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleted allocations for instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47
[ 1175.891821] env[68673]: DEBUG oslo_concurrency.lockutils [None req-cd328cc7-2623-42d5-8a86-0a3f43f16edd tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 532.495s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1175.892371] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 134.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1175.892586] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1175.892871] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1175.893054] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1175.895412] env[68673]: INFO nova.compute.manager [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Terminating instance
[ 1175.897277] env[68673]: DEBUG nova.compute.manager [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1175.897489] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1175.898173] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8e980d1-6a40-4f1d-8e36-371166b5812c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.905917] env[68673]: DEBUG nova.compute.manager [None req-16562231-9fb8-4183-8eab-bfafc1c51be7 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: 74928628-b30b-4e88-a2a4-82797d5c7965] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1175.911557] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22caa5e3-4415-4bd3-bdeb-65ea60e833f0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.940983] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d9b3a1bb-556b-4d86-a14c-a0d4000a2c47 could not be found.
[ 1175.941127] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1175.941276] env[68673]: INFO nova.compute.manager [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1175.941542] env[68673]: DEBUG oslo.service.loopingcall [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1175.942593] env[68673]: DEBUG nova.compute.manager [None req-16562231-9fb8-4183-8eab-bfafc1c51be7 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: 74928628-b30b-4e88-a2a4-82797d5c7965] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1175.942818] env[68673]: DEBUG nova.compute.manager [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1175.942919] env[68673]: DEBUG nova.network.neutron [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1175.967022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-16562231-9fb8-4183-8eab-bfafc1c51be7 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "74928628-b30b-4e88-a2a4-82797d5c7965" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.913s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1175.969104] env[68673]: DEBUG nova.network.neutron [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1175.976633] env[68673]: DEBUG nova.compute.manager [None req-83807532-cf16-4304-ba06-6cb8b55374c6 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] [instance: 43a38849-0ca1-4b73-b677-ca2baacff863] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1175.979073] env[68673]: INFO nova.compute.manager [-] [instance: d9b3a1bb-556b-4d86-a14c-a0d4000a2c47] Took 0.04 seconds to deallocate network for instance.
[ 1176.009882] env[68673]: DEBUG nova.compute.manager [None req-83807532-cf16-4304-ba06-6cb8b55374c6 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] [instance: 43a38849-0ca1-4b73-b677-ca2baacff863] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1176.032141] env[68673]: DEBUG oslo_concurrency.lockutils [None req-83807532-cf16-4304-ba06-6cb8b55374c6 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Lock "43a38849-0ca1-4b73-b677-ca2baacff863" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.972s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.042349] env[68673]: DEBUG nova.compute.manager [None req-561aa1bd-8b8a-4948-978f-1f004bf2c90b tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: f1f814f6-3e68-4729-8487-02e10c055cfa] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1176.067262] env[68673]: DEBUG nova.compute.manager [None req-561aa1bd-8b8a-4948-978f-1f004bf2c90b tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: f1f814f6-3e68-4729-8487-02e10c055cfa] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1176.093243] env[68673]: DEBUG oslo_concurrency.lockutils [None req-561aa1bd-8b8a-4948-978f-1f004bf2c90b tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "f1f814f6-3e68-4729-8487-02e10c055cfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.330s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.095163] env[68673]: DEBUG oslo_concurrency.lockutils [None req-425b018f-33da-440c-a867-645965acf556 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "d9b3a1bb-556b-4d86-a14c-a0d4000a2c47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.102285] env[68673]: DEBUG nova.compute.manager [None req-0182aa04-9446-4762-90b7-be01034a8379 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] [instance: 55fc09e9-581f-4ef2-a513-1b0c2f33dd75] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1176.125570] env[68673]: DEBUG nova.compute.manager [None req-0182aa04-9446-4762-90b7-be01034a8379 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] [instance: 55fc09e9-581f-4ef2-a513-1b0c2f33dd75] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1176.146943] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0182aa04-9446-4762-90b7-be01034a8379 tempest-ServerShowV247Test-1634783185 tempest-ServerShowV247Test-1634783185-project-member] Lock "55fc09e9-581f-4ef2-a513-1b0c2f33dd75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.257s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.156958] env[68673]: DEBUG nova.compute.manager [None req-9dac3052-c4b1-44c9-9049-392057f8f2d9 tempest-AttachInterfacesV270Test-2053391187 tempest-AttachInterfacesV270Test-2053391187-project-member] [instance: 7d00a8a9-3ddc-4555-9025-9d06479b34dc] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1176.183604] env[68673]: DEBUG nova.compute.manager [None req-9dac3052-c4b1-44c9-9049-392057f8f2d9 tempest-AttachInterfacesV270Test-2053391187 tempest-AttachInterfacesV270Test-2053391187-project-member] [instance: 7d00a8a9-3ddc-4555-9025-9d06479b34dc] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1176.207254] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9dac3052-c4b1-44c9-9049-392057f8f2d9 tempest-AttachInterfacesV270Test-2053391187 tempest-AttachInterfacesV270Test-2053391187-project-member] Lock "7d00a8a9-3ddc-4555-9025-9d06479b34dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.351s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.216997] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1176.265748] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1176.266010] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1176.267595] env[68673]: INFO nova.compute.claims [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1176.510769] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae337f5-2122-4164-a573-6c290a2b4fb5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.519113] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e9d7d9-8f6f-4ba5-a351-ec09b326cdda {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.550229] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bb232e-42ac-4afd-a532-c51e2097bdc2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.557175] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e205f06-49e7-4175-a0a7-c8f342bf68f5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.571292] env[68673]: DEBUG nova.compute.provider_tree [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1176.581257] env[68673]: DEBUG nova.scheduler.client.report [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1176.593826] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.328s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.594315] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1176.630041] env[68673]: DEBUG nova.compute.utils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1176.630968] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1176.631154] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1176.642070] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1176.711148] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1176.714576] env[68673]: DEBUG nova.policy [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f7098825a5f4469ae441d3dde1461fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afd7de5880f44f51a43d504b9c6fe8da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1176.734760] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:33:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='7ce1a803-8ad4-49c4-8bfe-6442fea06847',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-569628985',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1176.735016] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1176.735188] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1176.735371] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1176.735520] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1176.735669] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1176.735868] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1176.736041] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1176.736212] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1176.736426] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1176.736619] env[68673]: DEBUG nova.virt.hardware [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1176.737895] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a2ba36-7813-49be-a26f-133a175b5c2d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.745142] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f30dec4-2e66-41fe-963a-2ada3788d8ed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.140410] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Successfully created port: daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1177.945884] env[68673]: DEBUG nova.compute.manager [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Received event network-vif-plugged-daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1177.945884] env[68673]: DEBUG oslo_concurrency.lockutils [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] Acquiring lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1177.945884] env[68673]: DEBUG oslo_concurrency.lockutils [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1177.945884] env[68673]: DEBUG oslo_concurrency.lockutils [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1177.946428] env[68673]: DEBUG nova.compute.manager [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] No waiting events found dispatching network-vif-plugged-daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1177.946559] env[68673]: WARNING nova.compute.manager [req-119e5428-dc31-49bc-a28b-c9ea85174a0a req-bec4985f-59b9-486e-811f-3feabe4125f6 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Received unexpected event network-vif-plugged-daef18da-fc35-4d38-90d3-fd94967f3da7 for instance with vm_state building and task_state spawning.
[ 1178.303553] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Successfully updated port: daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1178.319326] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1178.319326] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1178.319326] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1178.402998] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1178.773460] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Updating instance_info_cache with network_info: [{"id": "daef18da-fc35-4d38-90d3-fd94967f3da7", "address": "fa:16:3e:83:d2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaef18da-fc", "ovs_interfaceid": "daef18da-fc35-4d38-90d3-fd94967f3da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1178.795084] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1178.795084] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance network_info: |[{"id": "daef18da-fc35-4d38-90d3-fd94967f3da7", "address": "fa:16:3e:83:d2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaef18da-fc", "ovs_interfaceid": "daef18da-fc35-4d38-90d3-fd94967f3da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1178.795516] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:d2:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'daef18da-fc35-4d38-90d3-fd94967f3da7', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1178.804841] env[68673]: DEBUG oslo.service.loopingcall [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1178.805402] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1178.805633] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5ebd0b7-cb22-4614-acd5-935cab7db941 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.826308] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1178.826308] env[68673]: value = "task-3433521"
[ 1178.826308] env[68673]: _type = "Task"
[ 1178.826308] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1178.833877] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433521, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1179.340978] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433521, 'name': CreateVM_Task, 'duration_secs': 0.330898} completed successfully.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.341235] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1179.341828] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.342008] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.342328] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1179.342571] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-354eaf3b-f8da-4eb4-b02a-984f5086b0c7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.347937] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 1179.347937] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52cfee3c-f8c4-284c-c39f-cf0e69f0a323" [ 1179.347937] env[68673]: _type = "Task" [ 1179.347937] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.358055] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52cfee3c-f8c4-284c-c39f-cf0e69f0a323, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.860292] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.860578] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1179.864021] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.021782] env[68673]: DEBUG nova.compute.manager [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Received event network-changed-daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1180.021989] env[68673]: DEBUG nova.compute.manager [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Refreshing instance network info cache due to event network-changed-daef18da-fc35-4d38-90d3-fd94967f3da7. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1180.022217] env[68673]: DEBUG oslo_concurrency.lockutils [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] Acquiring lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.022362] env[68673]: DEBUG oslo_concurrency.lockutils [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] Acquired lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.022519] env[68673]: DEBUG nova.network.neutron [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Refreshing network info cache for port daef18da-fc35-4d38-90d3-fd94967f3da7 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1180.638257] env[68673]: DEBUG nova.network.neutron [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Updated VIF entry in instance network info cache for port daef18da-fc35-4d38-90d3-fd94967f3da7. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1180.639028] env[68673]: DEBUG nova.network.neutron [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Updating instance_info_cache with network_info: [{"id": "daef18da-fc35-4d38-90d3-fd94967f3da7", "address": "fa:16:3e:83:d2:c0", "network": {"id": "f28c0dd2-2649-454b-979f-637e2e6cddb4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "706e55e049ed41a4bb47b7f5d092a466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaef18da-fc", "ovs_interfaceid": "daef18da-fc35-4d38-90d3-fd94967f3da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.657597] env[68673]: DEBUG oslo_concurrency.lockutils [req-5e451d6e-f42a-43c8-8f3e-44fdf3122981 req-18541167-0d14-4473-8034-e9c8b2c4cfb8 service nova] Releasing lock "refresh_cache-59b4e1de-612b-40f7-bc82-8c5eb3701b7c" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.779646] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.783472] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.783635] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1182.691492] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "bb290679-267b-4dc2-8337-896d5208c6cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.691754] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.783761] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.784091] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1184.784171] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1184.806255] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.806421] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.806633] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.806777] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.806902] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building.
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807030] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807167] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807300] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807421] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807535] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1184.807653] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1185.783888] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1185.784193] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1185.796027] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.796027] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.796027] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.796027] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1185.797287] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832540f7-eb4d-432b-9553-6ba5dafb6efa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.807525] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145204f0-98c9-4604-be94-c8e1c4c9d773 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.824021] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ab79ca-7e37-4fb3-a611-b3c57a31980e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.828496] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ac15b2-d43a-41c7-9028-f779b3c8d10a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.860600] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180891MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1185.860747] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.860934] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.011936] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 579c535d-7061-4822-8f7f-50b36ddfd44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012160] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance dcb71746-662e-4ace-afcb-a997d236f12b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012326] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012480] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012636] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012767] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.012903] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.013045] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.013175] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.013299] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.024859] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.035404] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.044953] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2c59faff-0fb2-452c-9b1f-3dde3767d699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.055102] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.063885] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 8aed9c82-b082-4f49-acf7-015a7e78e452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.072812] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance af1da6ac-13b7-4271-b917-204dfe91cced has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.081163] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.089847] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 42eae7a3-1757-40f6-a194-58a4eb1ee3a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.098215] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1186.098487] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1186.098674] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1186.114102] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1186.127620] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1186.127620] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1186.137644] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1186.160414] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1186.383245] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da95cec-a9e3-4748-b18b-137e990b5e2f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.391089] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a292c9b2-7214-4ebd-8188-8be9bdc01f9b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.421693] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03174e34-8f68-470b-bbc8-8e588a97a481 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.428778] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03692dc0-954c-48b3-a1d1-2ccaef6e6854 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.442395] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.452102] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1186.472288] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1186.472479] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.612s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.472492] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.783453] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.783796] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.783858] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.784267] env[68673]: DEBUG 
oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.784442] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1189.785679] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.808712] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.791624] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.791905] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1191.801085] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1212.077837] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.102206] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){ [ 1212.102206] env[68673]: value = "domain-c8" [ 1212.102206] env[68673]: _type = "ClusterComputeResource" [ 1212.102206] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1212.103574] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082e475c-c570-4e02-81d8-b643e520e767 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.121074] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 10 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1212.121265] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 579c535d-7061-4822-8f7f-50b36ddfd44b {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.121452] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid dcb71746-662e-4ace-afcb-a997d236f12b {{(pid=68673) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.121609] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid adb818a9-e799-4f57-93f6-ee4e32104d61 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.121761] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 12c03ca5-3526-4ebe-84af-b2027a6e50ac {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.121909] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122068] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid f4e540de-0b46-424b-894d-8ec0416d9828 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122217] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122359] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 94d40e8f-639a-4695-8d3d-1b0d81e29695 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122542] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid bfb20e23-e3fa-40b8-a114-222c148db6b0 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122698] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 59b4e1de-612b-40f7-bc82-8c5eb3701b7c {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.122957] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "579c535d-7061-4822-8f7f-50b36ddfd44b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.123213] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "dcb71746-662e-4ace-afcb-a997d236f12b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.123415] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "adb818a9-e799-4f57-93f6-ee4e32104d61" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.123902] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.123902] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.123980] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "f4e540de-0b46-424b-894d-8ec0416d9828" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.124166] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.124363] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.124553] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.124911] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.832645] env[68673]: WARNING oslo_vmware.rw_handles [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles File
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1221.832645] env[68673]: ERROR oslo_vmware.rw_handles [ 1221.833237] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1221.835208] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1221.835552] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Copying Virtual Disk [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/890073d6-cf02-4376-ac35-6abc3c222c68/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1221.835864] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d164a7f-2325-4714-b27f-e6fb2c28e555 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.843891] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for the task: (returnval){ [ 1221.843891] env[68673]: value = "task-3433522" [ 1221.843891] env[68673]: _type = "Task" [ 1221.843891] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.851815] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Task: {'id': task-3433522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.354445] env[68673]: DEBUG oslo_vmware.exceptions [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1222.354734] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.355303] env[68673]: ERROR nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1222.355303] env[68673]: Faults: ['InvalidArgument'] [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Traceback (most recent call last): [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] yield resources [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self.driver.spawn(context, instance, image_meta, [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self._fetch_image_if_missing(context, vi) [ 1222.355303] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] image_cache(vi, tmp_image_ds_loc) [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] vm_util.copy_virtual_disk( [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] session._wait_for_task(vmdk_copy_task) [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return self.wait_for_task(task_ref) [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return evt.wait() [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] result = hub.switch() [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1222.355729] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return self.greenlet.switch() [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self.f(*self.args, **self.kw) [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] raise exceptions.translate_fault(task_info.error) [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Faults: ['InvalidArgument'] [ 1222.356195] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] [ 1222.356195] env[68673]: INFO nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Terminating instance [ 1222.357279] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.357492] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.357727] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0feb8231-8f77-44eb-8089-be090566dc02 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.359940] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1222.360144] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1222.360867] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279c8064-9f3d-4856-975c-70ebebb2d3aa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.369840] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1222.369840] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-027beed0-7a58-45ca-b86f-4d0888eb195b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.370059] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.370238] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1222.371224] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10b962e3-129e-4690-b566-92087b7e29f3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.376016] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 1222.376016] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5260b3ff-9f69-c3ac-cdfb-9cddd62398e2" [ 1222.376016] env[68673]: _type = "Task" [ 1222.376016] env[68673]: } to complete. 
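[Editor's note] The traceback above shows the first spawn failing inside vm_util.copy_virtual_disk: session._wait_for_task blocks on the CopyVirtualDisk_Task, and oslo_vmware.api._poll_task raises translate_fault(task_info.error) as a VimFaultException carrying the 'InvalidArgument' fault ("A specified parameter was not correct: fileType"). Below is a minimal sketch of that poll-and-translate loop; the names and the session API are simplified stand-ins, not the actual oslo.vmware implementation (which runs inside a green thread via a looping call):

```python
# Sketch of the task-wait pattern the traceback shows: poll the task's
# 'info' property until success or error, translating a VMware fault
# into a Python exception. Illustrative only; the real logic lives in
# oslo_vmware.api and oslo_vmware.common.loopingcall.
import time


class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list          # e.g. ['InvalidArgument']


def wait_for_task(session, task_ref, interval=0.5):
    """Block until the vCenter task finishes, mirroring _poll_task."""
    while True:
        info = session.invoke_api('get_object_property', task_ref, 'info')
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # oslo.vmware's translate_fault() maps the fault name to an
            # exception class; 'InvalidArgument' has no dedicated class,
            # so it surfaces as a generic VimFaultException.
            fault_name = info.error.fault.__class__.__name__
            raise VimFaultException([fault_name],
                                    info.error.localizedMessage)
        time.sleep(interval)  # real code: FixedIntervalLoopingCall
```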
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.383141] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5260b3ff-9f69-c3ac-cdfb-9cddd62398e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.886300] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1222.886638] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Creating directory with path [datastore1] vmware_temp/b966dbbe-30ab-439e-9197-28e3daa67bc7/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.886889] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd1adee1-7d04-4f2a-b6bf-edcee4a37572 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.906174] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Created directory with path [datastore1] vmware_temp/b966dbbe-30ab-439e-9197-28e3daa67bc7/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.906374] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Fetch image to [datastore1] vmware_temp/b966dbbe-30ab-439e-9197-28e3daa67bc7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1222.906600] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/b966dbbe-30ab-439e-9197-28e3daa67bc7/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1222.907382] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66387340-860e-497a-8254-938979581e85 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.914183] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad52e12-9f93-4bd2-a7fb-916454919c5d {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.923395] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa93bde2-a390-4c02-87b8-c0ccd7254288 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.955359] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b20ab2a-8f27-4498-a52f-cc4506bb25cc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.961008] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a277d102-5cdc-483b-ab9a-3524e70441e2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.978018] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1222.978301] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1222.978497] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Deleting the datastore file [datastore1] 579c535d-7061-4822-8f7f-50b36ddfd44b {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.978755] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81766a50-1e84-451a-9cc8-82acf5597e81 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.982228] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1222.988318] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for the task: (returnval){ [ 1222.988318] env[68673]: value = "task-3433524" [ 1222.988318] env[68673]: _type = "Task" [ 1222.988318] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.995294] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Task: {'id': task-3433524, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.148503] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.149888] env[68673]: ERROR nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1223.149888] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: 
dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1223.150279] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] yield resources [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.driver.spawn(context, instance, image_meta, [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._fetch_image_if_missing(context, vi) [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image_fetch(context, vi, tmp_image_ds_loc) [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] images.fetch_image( [ 1223.150643] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] metadata = IMAGE_API.get(context, image_ref) [ 1223.151048] env[68673]: ERROR nova.compute.manager 
[instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return session.show(context, image_id, [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] _reraise_translated_image_exception(image_id) [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise new_exc.with_traceback(exc_trace) [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1223.151048] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1223.151424] 
env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1223.151424] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1223.151792] env[68673]: INFO nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Terminating instance [ 1223.151863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.152095] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.152730] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Start destroying the instance on the hypervisor. 
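[Editor's note] The second spawn failure above is different in kind: glanceclient raises HTTPUnauthorized (HTTP 401) on the image GET, and nova.image.glance._reraise_translated_image_exception re-raises it as nova.exception.ImageNotAuthorized while keeping the original traceback attached, which is why the log shows two chained stacks separated by "During handling of the above exception, another exception occurred". A minimal sketch of that translate-and-reraise pattern, using stand-in exception classes rather than the real glanceclient/nova definitions:

```python
# Sketch of the exception-translation pattern visible in the traceback:
# catch the client-level HTTP error, map it to a service-level
# exception, and re-raise with the original traceback preserved.
import sys


class HTTPUnauthorized(Exception):        # glanceclient-style 401
    pass


class ImageNotAuthorized(Exception):      # nova-style translation
    def __init__(self, image_id):
        super().__init__(f"Not authorized for image {image_id}.")


def _translate_image_exception(image_id, exc):
    if isinstance(exc, HTTPUnauthorized):
        return ImageNotAuthorized(image_id)
    return exc


def show(client, image_id):
    try:
        return client.get(image_id)
    except Exception:
        exc_type, exc_value, exc_trace = sys.exc_info()
        new_exc = _translate_image_exception(image_id, exc_value)
        # with_traceback keeps the glanceclient frames in the report,
        # producing the chained stacks seen in the log above.
        raise new_exc.with_traceback(exc_trace)
```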
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1223.152919] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1223.153164] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c547cf8-d5a2-428f-a055-2aff6e6346e4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.156111] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45de1ca-0eab-401d-bfff-048164a61002 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.163390] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1223.163612] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-368a1d20-e0f7-4005-a40d-ab9562b3c81a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.165853] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.166067] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1223.167023] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8185c964-31a5-4ad2-a953-561afbf7e33d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.171734] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for the task: (returnval){ [ 1223.171734] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52664180-2401-114b-f37c-e3afdb11e1f6" [ 1223.171734] env[68673]: _type = "Task" [ 1223.171734] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.178727] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52664180-2401-114b-f37c-e3afdb11e1f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.225233] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1223.225499] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1223.225715] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleting the datastore file [datastore1] dcb71746-662e-4ace-afcb-a997d236f12b {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.225983] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f47b299-02b9-4eab-82cd-98e8ad63d167 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.232345] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for the task: (returnval){ [ 1223.232345] env[68673]: value = "task-3433526" [ 1223.232345] env[68673]: _type = "Task" [ 1223.232345] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.240174] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': task-3433526, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.499140] env[68673]: DEBUG oslo_vmware.api [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Task: {'id': task-3433524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067544} completed successfully. 
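[Editor's note] Throughout this section, workers serialize access to the cached image VMDK with a named oslo.concurrency lock on "[datastore1] devstack-image-cache_base/7da4e48b-.../....vmdk"; the "Acquiring lock ... / Lock ... acquired ... waited Ns / Releasing lock ..." records with waited/held timings are emitted by lockutils itself. A minimal sketch of the pattern (the lock name is taken from the log; the surrounding fetch function is illustrative):

```python
# Sketch of the named-lock pattern behind the "Acquiring lock ..." and
# "Releasing lock ..." records. lockutils.lock() is a real
# oslo.concurrency context manager; the rest is illustrative.
from oslo_concurrency import lockutils

CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "7da4e48b-416f-425b-b73b-3305c69c87ef/"
              "7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk")


def fetch_image_if_missing(fetch_fn):
    # Only one greenthread per process works on this cache entry at a
    # time; lockutils logs the waited/held durations seen in the log.
    with lockutils.lock(CACHE_VMDK):
        fetch_fn()
```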
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.499404] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.499589] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1223.499760] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1223.499941] env[68673]: INFO nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1223.502104] env[68673]: DEBUG nova.compute.claims [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1223.502279] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.502487] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.683222] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1223.683222] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Creating directory with path [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.683222] 
env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38f40d61-29a8-4adb-a62b-a7eec7c65a9c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.694534] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Created directory with path [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.694741] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Fetch image to [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1223.694929] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1223.695775] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4ef922-f6ae-4b3d-9554-edf13191a91e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.702755] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843d2ee2-0d76-4ab7-be41-c6b2dd12df3e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.714475] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6aea90-da43-46e3-a898-c1f14b82610e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.753761] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b878d09-b6fe-4e23-9476-9830e971cb7d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.762564] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8c30a763-1454-472b-9e6b-5401ddd1680a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.764302] env[68673]: DEBUG oslo_vmware.api [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Task: {'id': task-3433526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06558} completed successfully. 
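[Editor's note] A few records below, the fetch path acquires a generic service ticket (SessionManager.AcquireGenericServiceTicket) and oslo_vmware.rw_handles opens an HTTP write connection to the ESX host's /folder URL to stream the 21318656-byte sparse VMDK onto datastore1. Here is an illustrative sketch of uploading to such a datastore URL with requests, assuming a pre-acquired ticket passed as a cookie; the real transfer goes through oslo_vmware.rw_handles.FileWriteHandle and this simplification is not that code:

```python
# Illustrative upload to an ESX datastore "/folder" URL, standing in
# for oslo_vmware.rw_handles. The URL shape and byte size come from the
# log records below; the ticket/session handling is an assumption.
import requests

URL = ("https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/"
       "vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/"
       "7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk"
       "?dcPath=ha-datacenter&dsName=datastore1")


def upload_vmdk(image_iter, size, ticket):
    headers = {
        "Content-Type": "binary/octet-stream",
        "Content-Length": str(size),          # 21318656 in this run
        # Generic service ticket from SessionManager, presented as a
        # cookie so the ESX host accepts the write without a session.
        "Cookie": f"vmware_cgi_ticket={ticket}",
    }
    resp = requests.put(URL, data=image_iter, headers=headers,
                        verify=False, timeout=300)
    resp.raise_for_status()
```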
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.764543] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.764728] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1223.764962] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1223.765130] env[68673]: INFO nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1223.769745] env[68673]: DEBUG nova.compute.claims [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1223.769745] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.786197] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1223.821313] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264fbc9d-b6dc-43c9-a3c1-7e159bfaff91 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.830228] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa144894-7e83-48ee-9391-ae3a6bda1f50 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.859476] env[68673]: DEBUG oslo_vmware.rw_handles [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1223.861286] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14734c33-22a8-4ea5-911e-a4fb804559f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.919818] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5716332-ae69-41a9-90bf-b286b658f265 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.925608] env[68673]: DEBUG oslo_vmware.rw_handles [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1223.925781] env[68673]: DEBUG oslo_vmware.rw_handles [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1223.934107] env[68673]: DEBUG nova.compute.provider_tree [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.942661] env[68673]: DEBUG nova.scheduler.client.report [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1223.962425] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.460s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.962993] env[68673]: ERROR nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 
tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1223.962993] env[68673]: Faults: ['InvalidArgument'] [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Traceback (most recent call last): [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self.driver.spawn(context, instance, image_meta, [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self._fetch_image_if_missing(context, vi) [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] image_cache(vi, tmp_image_ds_loc) [ 1223.962993] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] vm_util.copy_virtual_disk( [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] session._wait_for_task(vmdk_copy_task) [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return self.wait_for_task(task_ref) [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return evt.wait() [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] result = hub.switch() [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] return self.greenlet.switch() [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1223.963409] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] self.f(*self.args, **self.kw) [ 1223.963825] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1223.963825] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] raise exceptions.translate_fault(task_info.error) [ 1223.963825] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1223.963825] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Faults: ['InvalidArgument'] [ 1223.963825] env[68673]: ERROR nova.compute.manager [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] [ 1223.963825] env[68673]: DEBUG nova.compute.utils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1223.964988] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.195s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.967883] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Build of instance 579c535d-7061-4822-8f7f-50b36ddfd44b was re-scheduled: A specified parameter was not correct: fileType [ 1223.967883] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1223.968286] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1223.968459] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1223.968612] env[68673]: DEBUG nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1223.968776] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1224.244859] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f80e158-060c-4c50-b992-7a7703ae5028 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.256435] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae812428-bd59-4b28-8729-8efe0ee2fc38 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.289675] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9227c38-ecbf-47f7-bbb7-f0b6cda2c013 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.297922] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029f80a7-9674-494a-88c9-ef5352c45348 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.303452] env[68673]: DEBUG nova.network.neutron [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.314454] env[68673]: DEBUG nova.compute.provider_tree [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.317220] env[68673]: INFO nova.compute.manager [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Took 0.35 seconds to deallocate network for instance. 
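[Editor's note] The records just above trace the recovery path: after the claim is aborted, _do_build_and_run_instance re-schedules the build ("Build of instance ... was re-scheduled"), and _cleanup_allocated_networks first attempts to unplug VIFs (the vmwareapi driver does not implement unplug_vifs, hence the logged caveat) and then deallocates the Neutron ports. A condensed paraphrase of that control flow, with simplified method bodies; this is a stand-in, not Nova's actual code:

```python
# Condensed paraphrase of the reschedule/cleanup flow these records
# show. Method and message names echo nova.compute.manager; the bodies
# are simplified and the class itself is a sketch.
import logging

LOG = logging.getLogger(__name__)


class ComputeManagerSketch:
    def _do_build_and_run_instance(self, context, instance, request_spec):
        try:
            self._build_and_run_instance(context, instance, request_spec)
        except Exception as exc:
            # Matches "Build of instance ... was re-scheduled: <fault>"
            LOG.debug("Build of instance %s was re-scheduled: %s",
                      instance.uuid, exc)
            self._cleanup_allocated_networks(context, instance)
            # Hand the request back to the conductor to pick a new host.
            self.compute_task_api.build_instances(context, [instance],
                                                  request_spec)

    def _cleanup_allocated_networks(self, context, instance):
        try:
            self.driver.unplug_vifs(instance)
        except NotImplementedError:
            # The vmwareapi driver takes this branch, producing the
            # "does not provide unplug_vifs method" record above.
            LOG.debug("Virt driver does not provide unplug_vifs method, "
                      "so it is not possible determine if VIFs should "
                      "be unplugged.")
        self.network_api.deallocate_for_instance(context, instance)
```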
[ 1224.322591] env[68673]: DEBUG nova.scheduler.client.report [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1224.338274] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.373s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.339050] env[68673]: ERROR nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1224.339050] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.339435] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.driver.spawn(context, instance, image_meta, [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._fetch_image_if_missing(context, vi) [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image_fetch(context, vi, tmp_image_ds_loc) [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] images.fetch_image( [ 1224.339839] env[68673]: ERROR 
nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] metadata = IMAGE_API.get(context, image_ref) [ 1224.339839] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return session.show(context, image_id, [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] _reraise_translated_image_exception(image_id) [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise new_exc.with_traceback(exc_trace) [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1224.340212] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1224.340553] 
env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1224.340553] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.340813] env[68673]: DEBUG nova.compute.utils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1224.341500] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Build of instance dcb71746-662e-4ace-afcb-a997d236f12b was re-scheduled: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1224.341947] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1224.342170] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1224.342322] env[68673]: DEBUG nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1224.342436] env[68673]: DEBUG nova.network.neutron [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1224.416748] env[68673]: INFO nova.scheduler.client.report [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Deleted allocations for instance 579c535d-7061-4822-8f7f-50b36ddfd44b [ 1224.440998] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e2a98f5d-d8c7-4b2c-a9d5-f192aa66e042 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.723s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.443107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 381.375s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.443107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Acquiring lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.443107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.443996] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.445559] env[68673]: 
DEBUG neutronclient.v2_0.client [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68673) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1224.447734] env[68673]: ERROR nova.compute.manager [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1224.447734] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1224.448093] 
env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.448093] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.driver.spawn(context, instance, image_meta, [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._fetch_image_if_missing(context, vi) [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image_fetch(context, vi, tmp_image_ds_loc) [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] images.fetch_image( [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] metadata = IMAGE_API.get(context, image_ref) [ 1224.448405] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return session.show(context, image_id, [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1224.448762] env[68673]: ERROR nova.compute.manager 
[instance: dcb71746-662e-4ace-afcb-a997d236f12b] _reraise_translated_image_exception(image_id) [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise new_exc.with_traceback(exc_trace) [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = getattr(controller, method)(*args, **kwargs) [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._get(image_id) [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1224.448762] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] resp, body = self.http_client.get(url, headers=header) [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.request(url, 'GET', **kwargs) [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self._handle_response(resp) [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exc.from_response(resp, resp.content) [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. 
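The traceback above is the standard nova image-layer translation: the transport-level glanceclient.exc.HTTPUnauthorized from the 401 response is re-raised by _reraise_translated_image_exception() as the domain-level nova.exception.ImageNotAuthorized, with raise new_exc.with_traceback(exc_trace) preserving the original frames. A minimal runnable sketch of that pattern follows; the exception classes and the _glance_get() helper are stand-ins, not the real nova or glanceclient code.

import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""
    def __init__(self, image_id):
        super().__init__("Not authorized for image %s." % image_id)

def _glance_get(image_id):
    # Stand-in for the glanceclient GET that the log shows failing with 401.
    raise HTTPUnauthorized("HTTP 401 Unauthorized")

def show(image_id):
    try:
        return _glance_get(image_id)
    except HTTPUnauthorized:
        # Swap the exception type but keep the traceback, so the log
        # still points at the HTTP layer where the failure happened.
        exc_trace = sys.exc_info()[2]
        raise ImageNotAuthorized(image_id).with_traceback(exc_trace)

try:
    show("7da4e48b-416f-425b-b73b-3305c69c87ef")
except ImageNotAuthorized as exc:
    print(type(exc).__name__, "-", exc)

Because the re-raise happens inside an except block, Python chains the two exceptions, which is exactly why the log prints the glanceclient traceback first, then "During handling of the above exception, another exception occurred", then the nova-level one.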
[ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449145] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._build_and_run_instance(context, instance, image, [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exception.RescheduledException( [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.RescheduledException: Build of instance dcb71746-662e-4ace-afcb-a997d236f12b was re-scheduled: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1224.449537] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] exception_handler_v20(status_code, error_body) [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise client_exc(message=error_message, [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Neutron server returns request_ids: ['req-0316af33-503c-40d4-a591-6d85791a4db0'] [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: 
dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._deallocate_network(context, instance, requested_networks) [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.network_api.deallocate_for_instance( [ 1224.449944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] data = neutron.list_ports(**search_opts) [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.list('ports', self.ports_path, retrieve_all, [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] for r in self._pagination(collection, path, **params): [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] res = self.get(path, params=params) [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.450362] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: 
dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.retry_request("GET", action, body=body, [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.do_request(method, action, body=body, [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._handle_fault_response(status_code, replybody, resp) [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exception.Unauthorized() [ 1224.450753] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.Unauthorized: Not authorized. [ 1224.451141] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.451141] env[68673]: INFO nova.compute.manager [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Terminating instance [ 1224.452750] env[68673]: DEBUG nova.compute.manager [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1224.452946] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1224.454129] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51754625-38da-4d77-9516-24a98db46716 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.458481] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1224.468014] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd82e568-a7a6-42c9-ba84-24656d92b2f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.496546] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 579c535d-7061-4822-8f7f-50b36ddfd44b could not be found. [ 1224.496719] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1224.497043] env[68673]: INFO nova.compute.manager [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1224.498102] env[68673]: DEBUG oslo.service.loopingcall [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1224.498102] env[68673]: DEBUG nova.compute.manager [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1224.498102] env[68673]: DEBUG nova.network.neutron [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1224.502292] env[68673]: INFO nova.scheduler.client.report [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Deleted allocations for instance dcb71746-662e-4ace-afcb-a997d236f12b [ 1224.516283] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8de2d6bc-a933-45b9-8fef-c3abac8164b7 tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.806s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.517774] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.027s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.518533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Acquiring lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.518533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.518533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.520983] env[68673]: INFO nova.compute.manager [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Terminating instance [ 1224.522257] env[68673]: DEBUG nova.compute.manager [None 
req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1224.522407] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1224.522941] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87708ecb-5580-436d-afbd-fbb39a264e0b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.525728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.526502] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.527403] env[68673]: INFO nova.compute.claims [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1224.538740] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66093de6-67fa-4133-92c5-69342639e0a7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.551082] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1224.553992] env[68673]: DEBUG nova.network.neutron [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.563996] env[68673]: INFO nova.compute.manager [-] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] Took 0.07 seconds to deallocate network for instance. [ 1224.576309] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dcb71746-662e-4ace-afcb-a997d236f12b could not be found. 
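The destroy path logged above is deliberately idempotent: the SearchIndex.FindAllByUuid lookup finds no VM, vmops logs the InstanceNotFound warning, and the delete still completes ("Instance destroyed"). A rough sketch of that behaviour, assuming illustrative function names rather than nova's actual vmops API:

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("vmops")

def find_vm_by_uuid(uuid):
    # Stand-in for the SearchIndex.FindAllByUuid call seen in the log;
    # returning None models "no such VM registered on the backend".
    return None

def destroy(uuid):
    vm_ref = find_vm_by_uuid(uuid)
    if vm_ref is None:
        # Missing on the backend is not fatal: the delete is treated as
        # already done, mirroring the WARNING in the log.
        LOG.warning("Instance does not exist on backend: %s", uuid)
    else:
        pass  # power off and unregister the VM here
    LOG.debug("Instance destroyed")

destroy("dcb71746-662e-4ace-afcb-a997d236f12b")

Treating "already gone" as success is what lets the delete proceed even for a build that never created a backend VM, as with both instances in this run.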
[ 1224.576514] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1224.576692] env[68673]: INFO nova.compute.manager [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1224.576924] env[68673]: DEBUG oslo.service.loopingcall [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1224.581617] env[68673]: DEBUG nova.compute.manager [-] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1224.581719] env[68673]: DEBUG nova.network.neutron [-] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1224.633619] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.686676] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a4c1f295-f229-44ee-a9a1-4fdfbc7d96c6 tempest-ServersAdminNegativeTestJSON-1805023218 tempest-ServersAdminNegativeTestJSON-1805023218-project-member] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.244s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.688124] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.565s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.688323] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 579c535d-7061-4822-8f7f-50b36ddfd44b] During sync_power_state the instance has a pending task (deleting). Skip. 
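Every oslo_concurrency.lockutils line in this run follows one logging pattern: announce the acquire attempt, report how long the caller waited once the named lock is granted, and report how long it was held on release (the "waited 379.027s" above is a terminate request queued behind the whole build attempt). A stdlib-only re-implementation of that pattern, for illustration only; nova's real serialization is done by oslo.concurrency itself:

import functools
import threading
import time

_LOCKS: dict[str, threading.Lock] = {}

def synchronized(name):
    lock = _LOCKS.setdefault(name, threading.Lock())
    def decorator(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            target = '"%s" by "%s"' % (name, fn.__qualname__)
            print('Acquiring lock %s' % target)
            t0 = time.monotonic()
            with lock:
                print('Lock %s :: waited %.3fs' % (target, time.monotonic() - t0))
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    print('Lock "%s" "released" :: held %.3fs'
                          % (name, time.monotonic() - t1))
        return inner
    return decorator

@synchronized("compute_resources")
def instance_claim():
    time.sleep(0.01)  # stand-in for the resource tracker's bookkeeping

instance_claim()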
[ 1224.688497] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "579c535d-7061-4822-8f7f-50b36ddfd44b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.714277] env[68673]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68673) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1224.714531] env[68673]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token; please verify the Neutron admin credentials located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-695ab527-9044-42c5-b4ed-e765abddaefb'] [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1224.715250] env[68673]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in 
_deallocate_network_with_retries [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1224.715808] env[68673]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1224.716342] env[68673]: ERROR oslo.service.loopingcall [ 1224.717629] env[68673]: ERROR nova.compute.manager [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1224.751478] env[68673]: ERROR nova.compute.manager [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs) [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] exception_handler_v20(status_code, error_body) [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise client_exc(message=error_message, [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Neutron server returns request_ids: ['req-695ab527-9044-42c5-b4ed-e765abddaefb'] [ 1224.751478] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During handling of the above exception, another exception occurred: [ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] [ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Traceback (most recent call last): [ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1224.751944] env[68673]: 
ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._delete_instance(context, instance, bdms)
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._shutdown_instance(context, instance, bdms)
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._try_deallocate_network(context, instance, requested_networks)
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] with excutils.save_and_reraise_exception():
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.751944] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.force_reraise()
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise self.value
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] _deallocate_network_with_retries()
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return evt.wait()
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = hub.switch()
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.greenlet.switch()
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1224.752355] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = func(*self.args, **self.kw)
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] result = f(*args, **kwargs)
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._deallocate_network(
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self.network_api.deallocate_for_instance(
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] data = neutron.list_ports(**search_opts)
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs)
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.list('ports', self.ports_path, retrieve_all,
[ 1224.752872] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs)
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] for r in self._pagination(collection, path, **params):
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] res = self.get(path, params=params)
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs)
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.retry_request("GET", action, body=body,
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs)
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1224.753495] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] return self.do_request(method, action, body=body,
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] ret = obj(*args, **kwargs)
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] self._handle_fault_response(status_code, replybody, resp)
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1224.753845] env[68673]: ERROR nova.compute.manager [instance: dcb71746-662e-4ace-afcb-a997d236f12b]
[ 1224.785208] env[68673]: DEBUG oslo_concurrency.lockutils [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.267s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1224.787301] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.664s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1224.787301] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1224.787301] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "dcb71746-662e-4ace-afcb-a997d236f12b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1224.839265] env[68673]: INFO nova.compute.manager [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] [instance: dcb71746-662e-4ace-afcb-a997d236f12b] Successfully reverted task state from None on failure for instance.
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server [None req-07213c5c-2e23-4fdc-b8de-043a519a721d tempest-ListImageFiltersTestJSON-2062211666 tempest-ListImageFiltersTestJSON-2062211666-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-695ab527-9044-42c5-b4ed-e765abddaefb']
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1224.846013] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server raise self.value
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1224.846658] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server raise self.value
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server raise self.value
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1224.847214] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server raise self.value
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server raise self.value
[ 1224.847967] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1224.848516] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1224.849145] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1224.849668] env[68673]: ERROR oslo_messaging.rpc.server
[ 1224.855690] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfd2025-c737-46a7-bd4b-ba3063f1a6c0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.863458] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb757000-bb57-4dd5-a78a-e65111abd44c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.893923] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01576f4d-f89c-417c-a94d-2749238f3690 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.901133] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babf46a1-8b9c-4ab9-89aa-08f5bf82df89 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.914055] env[68673]: DEBUG nova.compute.provider_tree [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1224.922353] env[68673]: DEBUG nova.scheduler.client.report [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1224.937011] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.411s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1224.937497] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1224.939769] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.306s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1224.941231] env[68673]: INFO nova.compute.claims [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1224.969854] env[68673]: DEBUG nova.compute.utils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1224.971167] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1224.971336] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1224.978637] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1225.041868] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1225.051580] env[68673]: DEBUG nova.policy [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b0ed7aa0f5c413d9cc32a8bbf4724df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92e8efd351c449e8815c0ec3b6070d20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1225.068189] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1225.068441] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1225.068601] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1225.068780] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1225.068926] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1225.069076] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1225.069285] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1225.069443] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1225.069605] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1225.069805] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1225.069976] env[68673]: DEBUG nova.virt.hardware [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1225.070839] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722ece34-91a3-4972-94a8-aba4bce4e2e5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.080418] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec65bf3-e619-4d50-ba5d-cbce82aabb85 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.200711] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c90deb-885f-41cf-90ef-4795917011d0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.209092] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aeffb3e-4837-4f73-9575-14bffccea27e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.238583] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebb9bbc-38a1-40b0-84b0-629bbb169d2f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.246155] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b9bfe1-f7b7-49b0-b443-1af6646ec5bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.259107] env[68673]: DEBUG nova.compute.provider_tree [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1225.268435] env[68673]: DEBUG nova.scheduler.client.report [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1225.284032] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1225.284032] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1225.319064] env[68673]: DEBUG nova.compute.utils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1225.320552] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Not allocating networking since 'none' was specified. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}}
[ 1225.330372] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1225.398019] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Successfully created port: 51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1225.413020] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1225.436405] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1225.436664] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1225.436827] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1225.437019] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1225.437168] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1225.437353] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1225.437699] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1225.437871] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1225.438134] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1225.438331] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1225.438509] env[68673]: DEBUG nova.virt.hardware [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1225.439416] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2de5250-370b-40f6-811c-0b4d974283ae {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.448386] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7527d3-7b81-4091-b879-5d81391df496 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.462900] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance VIF info [] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1225.468673] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Creating folder: Project (ee1d36acce7c4a438c3074dc890e0294). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1225.468979] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-653dd2d7-267b-47c6-b517-0b48c63e8c5b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.482546] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Created folder: Project (ee1d36acce7c4a438c3074dc890e0294) in parent group-v685311.
[ 1225.482546] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Creating folder: Instances. Parent ref: group-v685378. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1225.482739] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c142122d-f768-4847-b3f2-888cb5b78226 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.490843] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Created folder: Instances in parent group-v685378.
[ 1225.490918] env[68673]: DEBUG oslo.service.loopingcall [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1225.492144] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1225.492144] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88f695a8-a1ec-40b2-85e4-1de843bff436 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.508193] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1225.508193] env[68673]: value = "task-3433529"
[ 1225.508193] env[68673]: _type = "Task"
[ 1225.508193] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1225.515715] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433529, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1226.018822] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433529, 'name': CreateVM_Task, 'duration_secs': 0.252595} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1226.019096] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1226.019416] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1226.019572] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1226.019893] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1226.020158] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fc68ec0-86ca-4d8a-b89f-f36ccd579512 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1226.024985] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for the task: (returnval){
[ 1226.024985] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]528038d5-9952-ab3c-3150-9ded3e5e9dc8"
[ 1226.024985] env[68673]: _type = "Task"
[ 1226.024985] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1226.034236] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]528038d5-9952-ab3c-3150-9ded3e5e9dc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1226.194713] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Successfully updated port: 51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1226.207564] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1226.207564] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1226.207844] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1226.256599] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1226.485466] env[68673]: DEBUG nova.compute.manager [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Received event network-vif-plugged-51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1226.485466] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Acquiring lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1226.485647] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1226.485775] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1226.485949] env[68673]: DEBUG nova.compute.manager [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] No waiting events found dispatching network-vif-plugged-51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1226.486258] env[68673]: WARNING nova.compute.manager [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Received unexpected event network-vif-plugged-51e583a4-b6b4-40b0-9854-1cab80730efe for instance with vm_state building and task_state spawning.
[ 1226.486440] env[68673]: DEBUG nova.compute.manager [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Received event network-changed-51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1226.486599] env[68673]: DEBUG nova.compute.manager [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Refreshing instance network info cache due to event network-changed-51e583a4-b6b4-40b0-9854-1cab80730efe. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1226.486764] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Acquiring lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1226.534577] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1226.534950] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1226.535052] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1226.628720] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Updating instance_info_cache with network_info: [{"id": "51e583a4-b6b4-40b0-9854-1cab80730efe", "address": "fa:16:3e:d8:07:42", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51e583a4-b6", "ovs_interfaceid": "51e583a4-b6b4-40b0-9854-1cab80730efe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1226.640894] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1226.641196] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance network_info: |[{"id": "51e583a4-b6b4-40b0-9854-1cab80730efe", "address": "fa:16:3e:d8:07:42", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51e583a4-b6", "ovs_interfaceid": "51e583a4-b6b4-40b0-9854-1cab80730efe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1226.641491] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Acquired lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1226.641717] env[68673]: DEBUG nova.network.neutron [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Refreshing network info cache for port 51e583a4-b6b4-40b0-9854-1cab80730efe {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1226.642825] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:07:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51e583a4-b6b4-40b0-9854-1cab80730efe', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1226.651167] env[68673]: DEBUG oslo.service.loopingcall [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1226.654224] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1226.654690] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-412f43b1-3aca-4325-97c6-cb842bb4619a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1226.674868] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1226.674868] env[68673]: value = "task-3433530"
[ 1226.674868] env[68673]: _type = "Task"
[ 1226.674868] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1226.683955] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433530, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1226.989567] env[68673]: DEBUG nova.network.neutron [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Updated VIF entry in instance network info cache for port 51e583a4-b6b4-40b0-9854-1cab80730efe. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1226.989937] env[68673]: DEBUG nova.network.neutron [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Updating instance_info_cache with network_info: [{"id": "51e583a4-b6b4-40b0-9854-1cab80730efe", "address": "fa:16:3e:d8:07:42", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51e583a4-b6", "ovs_interfaceid": "51e583a4-b6b4-40b0-9854-1cab80730efe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1226.999708] env[68673]: DEBUG oslo_concurrency.lockutils [req-32cf3b71-9aa1-46b2-a34a-56da788d9d82 req-2c98a32f-fe41-439b-8a67-c44638e76dbb service nova] Releasing lock "refresh_cache-3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1227.129682] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1227.185087] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433530, 'name': CreateVM_Task, 'duration_secs': 0.270698} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1227.185260] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1227.185986] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1227.186158] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1227.186506] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1227.186779] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72d87817-4f6c-48e6-ad31-bb0726bd2949 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1227.190942] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){
[ 1227.190942] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e24a9d-c8d8-ff53-9fbd-532e78aea9a2"
[ 1227.190942] env[68673]: _type = "Task"
[ 1227.190942] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1227.198328] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e24a9d-c8d8-ff53-9fbd-532e78aea9a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1227.701576] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1227.701845] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1227.702069] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1228.005191] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1236.857758] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "72c1282a-8a71-4952-a02a-b6dd45269488" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1241.783619] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1241.783893] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1242.778596] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.784639] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.784981] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1244.784981] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1244.808477] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.808645] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.811632] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.811632] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.811632] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.811632] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.811632] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.812076] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.812076] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.812076] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1244.812076] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1246.784057] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.784057] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1247.601450] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "76697868-c920-43d0-ab11-fcdff2e38dc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.601450] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.783236] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1247.795026] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.795299] env[68673]: DEBUG oslo_concurrency.lockutils [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.795380] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.795523] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1247.796616] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a13c21-becf-4344-a5e2-a6c3e49c8ef6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.805147] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e8316b-475e-4f16-94fb-dc951d247b2e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.818637] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c25704-e1ec-4b83-855f-92964068a727 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.825008] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20552990-ce82-40b7-a0a5-6adc9d540c3f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.861178] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180904MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1247.861178] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.861178] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.943010] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance adb818a9-e799-4f57-93f6-ee4e32104d61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943182] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943380] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943508] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943628] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943745] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943857] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.943970] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.944095] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.944208] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.956178] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.966902] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 8aed9c82-b082-4f49-acf7-015a7e78e452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.977872] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance af1da6ac-13b7-4271-b917-204dfe91cced has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.988240] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.998221] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 42eae7a3-1757-40f6-a194-58a4eb1ee3a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1248.009146] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1248.020161] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1248.020409] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1248.020581] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1248.240102] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33feb234-0118-4f48-8940-603a117148c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.247571] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb65247-d381-41b9-beb6-6f9c5a27712c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.276860] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3019c39-4a41-4d39-838d-a3b920797af1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.284353] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9287f28f-681f-45ad-891e-9628c42b4606 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.297655] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.306070] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1248.319799] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1248.319799] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.460s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.320695] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.321054] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.783490] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.852179] env[68673]: WARNING oslo_vmware.rw_handles [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1271.852179] env[68673]: ERROR oslo_vmware.rw_handles [ 1271.852928] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1271.854713] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 
tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1271.854985] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Copying Virtual Disk [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/da9dabff-ea50-48eb-b6b7-e111455d2f2a/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1271.855339] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-854e7606-2e3f-4dd2-be05-6a30bcb8bf9b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.864631] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for the task: (returnval){ [ 1271.864631] env[68673]: value = "task-3433531" [ 1271.864631] env[68673]: _type = "Task" [ 1271.864631] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.872229] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Task: {'id': task-3433531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.375369] env[68673]: DEBUG oslo_vmware.exceptions [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1272.375668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.376283] env[68673]: ERROR nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1272.376283] env[68673]: Faults: ['InvalidArgument'] [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Traceback (most recent call last): [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] yield resources [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self.driver.spawn(context, instance, image_meta, [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self._fetch_image_if_missing(context, vi) [ 1272.376283] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] image_cache(vi, tmp_image_ds_loc) [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] vm_util.copy_virtual_disk( [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] session._wait_for_task(vmdk_copy_task) [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return self.wait_for_task(task_ref) [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return evt.wait() [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] result = hub.switch() [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1272.376804] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return self.greenlet.switch() [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self.f(*self.args, **self.kw) [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] raise exceptions.translate_fault(task_info.error) [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Faults: ['InvalidArgument'] [ 1272.377242] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] [ 1272.377242] env[68673]: INFO nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Terminating instance [ 1272.378242] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.378457] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1272.378692] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1c331b3-f147-4645-a955-bb1eb6885bd4 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.381076] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1272.381270] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1272.381993] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feca3bc0-03f5-4e7d-a410-2dc1839f47dd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.388727] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1272.388952] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9d6dc48-0acd-43b5-978f-9fd6b365db33 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.391138] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1272.391315] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1272.392266] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b323fcd-d649-46a6-a9e5-4ec154e2b851 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.397074] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 1272.397074] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5246b976-e206-a58b-4b88-0ea88a31bc15" [ 1272.397074] env[68673]: _type = "Task" [ 1272.397074] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.403807] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5246b976-e206-a58b-4b88-0ea88a31bc15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.457702] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1272.457956] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1272.458614] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Deleting the datastore file [datastore1] adb818a9-e799-4f57-93f6-ee4e32104d61 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1272.458912] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-914b4580-07e9-4973-9754-edc4f3ede6e9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.465567] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for the task: (returnval){ [ 1272.465567] env[68673]: value = "task-3433533" [ 1272.465567] env[68673]: _type = "Task" [ 1272.465567] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.473761] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Task: {'id': task-3433533, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.907898] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1272.908226] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1272.908411] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-742aacef-4d48-4c15-9581-f844aa560446 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.919825] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1272.920024] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Fetch image to [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1272.920204] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1272.920903] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae11afb-9cf6-423e-a185-1329001f7017 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.927539] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fa7b7e-0743-480f-93fb-7888fdce7a30 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.936390] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90d4c5f-6930-44f8-931d-ed46c423691d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.970245] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25d7410-653d-4bab-930e-d71e360350ff 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.977443] env[68673]: DEBUG oslo_vmware.api [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Task: {'id': task-3433533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070243} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.978922] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1272.979165] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1272.979294] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1272.979467] env[68673]: INFO nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Took 0.60 seconds to destroy the instance on the hypervisor. 
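The entries above show both halves of the oslo.vmware task protocol at work: every vSphere *_Task method (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a Task managed-object reference immediately, wait_for_task() then polls it (the repeated "progress is 0%" / "completed successfully" lines above), and a failed task is translated into an exception; "Fault InvalidArgument not matched" earlier in this log means get_fault_class() found no more specific exception class for that fault name, so the error surfaced as a generic VimFaultException carrying Faults: ['InvalidArgument']. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, datacenter moref, and datastore paths below are placeholders, not values taken from this log:

    from oslo_vmware import api, vim_util
    from oslo_vmware import exceptions as vexc

    # Placeholder connection details (assumptions, not from this log).
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # the cadence behind the repeated _poll_task lines

    dc_ref = vim_util.get_moref('datacenter-3', 'Datacenter')  # placeholder moref
    disk_mgr = session.vim.service_content.virtualDiskManager

    # A *_Task call returns a Task moref at once, exactly as logged
    # ("Waiting for the task: (returnval){ value = task-... }").
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',  # placeholder
        sourceDatacenter=dc_ref,
        destName='[datastore1] vmware_temp/example/example.vmdk',       # placeholder
        destDatacenter=dc_ref)

    try:
        # wait_for_task() polls task.info and raises once task_info.error is set.
        task_info = session.wait_for_task(task)
    except vexc.VimFaultException as exc:
        # fault_list carries the raw vSphere fault names; this log shows
        # ['InvalidArgument'] for the rejected fileType parameter.
        if 'InvalidArgument' in exc.fault_list:
            raise  # Nova's manager then aborts the resource claim and reschedules
        raise

On success, wait_for_task() returns the task info object, which is where the 'duration_secs' values logged above (e.g. for task-3433530 and task-3433533) come from.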
[ 1272.981560] env[68673]: DEBUG nova.compute.claims [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1272.981728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.981939] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.984495] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4dbd1b01-8d81-401a-9505-f0635f7acad5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.006068] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1273.057358] env[68673]: DEBUG oslo_vmware.rw_handles [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1273.120322] env[68673]: DEBUG oslo_vmware.rw_handles [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1273.120609] env[68673]: DEBUG oslo_vmware.rw_handles [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1273.270721] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586f3e19-9824-4dc6-8392-d8e34106028b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.278320] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04099daf-131d-4a6d-8e9d-6c2d0150ae3d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.307917] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3e40e8-8b10-4e0d-af81-16965869ec05 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.314572] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03e8a35-fc2a-4f8d-bc66-58aee4280c17 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.328358] env[68673]: DEBUG nova.compute.provider_tree [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.336912] env[68673]: DEBUG nova.scheduler.client.report [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1273.352326] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.370s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.353054] env[68673]: ERROR nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1273.353054] env[68673]: Faults: ['InvalidArgument'] [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Traceback (most recent call last): [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self.driver.spawn(context, instance, image_meta, [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self._fetch_image_if_missing(context, vi) [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] image_cache(vi, tmp_image_ds_loc) [ 1273.353054] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] vm_util.copy_virtual_disk( [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] session._wait_for_task(vmdk_copy_task) [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return self.wait_for_task(task_ref) [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return evt.wait() [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] result = hub.switch() [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] return self.greenlet.switch() [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1273.353491] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] self.f(*self.args, **self.kw) [ 1273.353898] env[68673]: ERROR nova.compute.manager [instance: 
adb818a9-e799-4f57-93f6-ee4e32104d61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1273.353898] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] raise exceptions.translate_fault(task_info.error) [ 1273.353898] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1273.353898] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Faults: ['InvalidArgument'] [ 1273.353898] env[68673]: ERROR nova.compute.manager [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] [ 1273.354061] env[68673]: DEBUG nova.compute.utils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1273.355509] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Build of instance adb818a9-e799-4f57-93f6-ee4e32104d61 was re-scheduled: A specified parameter was not correct: fileType [ 1273.355509] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1273.355886] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1273.356068] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1273.356241] env[68673]: DEBUG nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1273.356424] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1274.031033] env[68673]: DEBUG nova.network.neutron [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.041933] env[68673]: INFO nova.compute.manager [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Took 0.69 seconds to deallocate network for instance. [ 1274.139557] env[68673]: INFO nova.scheduler.client.report [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Deleted allocations for instance adb818a9-e799-4f57-93f6-ee4e32104d61 [ 1274.159076] env[68673]: DEBUG oslo_concurrency.lockutils [None req-09c5bd68-29a3-45d5-8851-ead3efb97e8a tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.561s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.160271] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.614s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.161223] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Acquiring lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.161550] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.161771] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.164025] env[68673]: INFO nova.compute.manager [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Terminating instance [ 1274.165898] env[68673]: DEBUG nova.compute.manager [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1274.166099] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1274.166583] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f44c5e6c-238b-431f-ae17-af6ea974996d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.171772] env[68673]: DEBUG nova.compute.manager [None req-24d09f2c-e046-43da-8899-12c3f3c3343b tempest-FloatingIPsAssociationNegativeTestJSON-319373223 tempest-FloatingIPsAssociationNegativeTestJSON-319373223-project-member] [instance: 2c59faff-0fb2-452c-9b1f-3dde3767d699] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1274.178282] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39219055-621e-4011-ba63-ab18001cb75e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.194569] env[68673]: DEBUG nova.compute.manager [None req-24d09f2c-e046-43da-8899-12c3f3c3343b tempest-FloatingIPsAssociationNegativeTestJSON-319373223 tempest-FloatingIPsAssociationNegativeTestJSON-319373223-project-member] [instance: 2c59faff-0fb2-452c-9b1f-3dde3767d699] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1274.206528] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adb818a9-e799-4f57-93f6-ee4e32104d61 could not be found. 
[ 1274.206767] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1274.206952] env[68673]: INFO nova.compute.manager [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1274.207204] env[68673]: DEBUG oslo.service.loopingcall [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1274.207428] env[68673]: DEBUG nova.compute.manager [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1274.207521] env[68673]: DEBUG nova.network.neutron [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1274.223315] env[68673]: DEBUG oslo_concurrency.lockutils [None req-24d09f2c-e046-43da-8899-12c3f3c3343b tempest-FloatingIPsAssociationNegativeTestJSON-319373223 tempest-FloatingIPsAssociationNegativeTestJSON-319373223-project-member] Lock "2c59faff-0fb2-452c-9b1f-3dde3767d699" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.542s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.230629] env[68673]: DEBUG nova.network.neutron [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.236086] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1274.238800] env[68673]: INFO nova.compute.manager [-] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] Took 0.03 seconds to deallocate network for instance. 
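The "Waiting for function ... _deallocate_network_with_retries to return" entry above is the signature of oslo.service's RetryDecorator wrapping the Neutron deallocation. A small runnable sketch of that wrapper; the retry counts and exception tuple here are illustrative, not Nova's configured values:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10, exceptions=(IOError,))
    def _deallocate_network_with_retries():
        # Stand-in for the real Neutron call; raising IOError here would
        # trigger up to three retries with increasing sleep intervals.
        print("deallocating network")

    _deallocate_network_with_retries()

The DEBUG line in the log is emitted by the decorator's inner wrapper while it waits on the looping call's event, which is why its source location is oslo_service/loopingcall.py rather than nova code.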
[ 1274.284767] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.285033] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.286451] env[68673]: INFO nova.compute.claims [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1274.322255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-15b53967-af3c-4093-9355-a9a1d7c186a5 tempest-InstanceActionsV221TestJSON-1619887809 tempest-InstanceActionsV221TestJSON-1619887809-project-member] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.323458] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 62.200s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.323650] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: adb818a9-e799-4f57-93f6-ee4e32104d61] During sync_power_state the instance has a pending task (deleting). Skip. 
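The claim above succeeds against the same provider inventory that placement reports repeatedly in this log. A worked example of what that inventory buys, using placement's standard capacity rule, capacity = (total - reserved) * allocation_ratio:

    # Inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e as
    # reported in this log (only the fields needed for capacity).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the node offers 192 schedulable vCPUs against 48 physical ones, which is why the single-vCPU tempest instances here keep claiming successfully.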
[ 1274.323821] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "adb818a9-e799-4f57-93f6-ee4e32104d61" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.498832] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a445ef6-2887-428b-b5d3-d6393717dccd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.506401] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49044f34-d998-448a-b9e6-fa1f67bfdcbb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.541893] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f35ea7-0f94-46e5-9341-5e441a2c75e7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.550612] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d78fa5-f64c-4899-b65d-6665fad67d86 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.563537] env[68673]: DEBUG nova.compute.provider_tree [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.575436] env[68673]: DEBUG nova.scheduler.client.report [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1274.588829] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.589346] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1274.626048] env[68673]: DEBUG nova.compute.utils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1274.627508] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1274.627722] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1274.636472] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1274.693901] env[68673]: DEBUG nova.policy [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '651e7a0fac0b41ff98ffa3d9f5a7a238', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1580894c604a4189aefff7162f578876', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1274.699550] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1274.724196] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1274.724719] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1274.724719] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.724868] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1274.724913] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.725046] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1274.725299] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1274.725493] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1274.725665] 
env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1274.725825] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1274.725996] env[68673]: DEBUG nova.virt.hardware [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1274.726948] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05195a79-1764-4cfe-aef5-fd2ab061b8ef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.734736] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3a4566-eff3-408b-a33b-a2d40f849b6f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.167158] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Successfully created port: 6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1275.810928] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Successfully updated port: 6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1275.829661] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.829816] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquired lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.829967] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1275.873804] env[68673]: DEBUG 
nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1276.110684] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Updating instance_info_cache with network_info: [{"id": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "address": "fa:16:3e:ab:c3:67", "network": {"id": "6b093bf4-1f0b-4bfd-9f0f-45bc5f4d2615", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1586383297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1580894c604a4189aefff7162f578876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f90e5-bf", "ovs_interfaceid": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.121572] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Releasing lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.121847] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance network_info: |[{"id": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "address": "fa:16:3e:ab:c3:67", "network": {"id": "6b093bf4-1f0b-4bfd-9f0f-45bc5f4d2615", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1586383297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1580894c604a4189aefff7162f578876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f90e5-bf", 
"ovs_interfaceid": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1276.123424] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:c3:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e3f90e5-bfe9-4675-b27f-5b561aabc88d', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1276.130859] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Creating folder: Project (1580894c604a4189aefff7162f578876). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1276.132204] env[68673]: DEBUG nova.compute.manager [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Received event network-vif-plugged-6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1276.132402] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Acquiring lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.132600] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.132761] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.132921] env[68673]: DEBUG nova.compute.manager [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] No waiting events found dispatching network-vif-plugged-6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1276.133096] env[68673]: WARNING nova.compute.manager [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Received unexpected event 
network-vif-plugged-6e3f90e5-bfe9-4675-b27f-5b561aabc88d for instance with vm_state building and task_state spawning. [ 1276.133253] env[68673]: DEBUG nova.compute.manager [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Received event network-changed-6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1276.133403] env[68673]: DEBUG nova.compute.manager [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Refreshing instance network info cache due to event network-changed-6e3f90e5-bfe9-4675-b27f-5b561aabc88d. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1276.133579] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Acquiring lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.133715] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Acquired lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.133866] env[68673]: DEBUG nova.network.neutron [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Refreshing network info cache for port 6e3f90e5-bfe9-4675-b27f-5b561aabc88d {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1276.135029] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8eb8f45-36ce-4790-af9e-264809b21ccf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.148298] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Created folder: Project (1580894c604a4189aefff7162f578876) in parent group-v685311. [ 1276.148500] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Creating folder: Instances. Parent ref: group-v685382. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1276.148726] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79048296-88c0-44d4-b807-dc8ab9c2eea5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.157577] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Created folder: Instances in parent group-v685382. 
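The Folder.CreateFolder calls above and the Folder.CreateVM_Task that follows use oslo.vmware's invoke/wait pattern. A sketch of that pattern, assuming an established oslo_vmware VMwareAPISession named session; vm_folder_ref, config_spec and res_pool_ref stand in for values the log does not show:

    # Synchronous vSphere call: returns the new folder's managed object ref.
    instances_ref = session.invoke_api(session.vim, 'CreateFolder',
                                       vm_folder_ref, name='Instances')

    # Asynchronous call: returns a task ref that must be polled, producing
    # the "Waiting for the task" and "progress is N%" lines seen below.
    task = session.invoke_api(session.vim, 'CreateVM_Task', instances_ref,
                              config=config_spec, pool=res_pool_ref)
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result  # managed object ref of the created VM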
[ 1276.157821] env[68673]: DEBUG oslo.service.loopingcall [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1276.158010] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1276.158214] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03cffbe2-f6a5-41dd-8ba6-c285bf024512 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.181616] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1276.181616] env[68673]: value = "task-3433536" [ 1276.181616] env[68673]: _type = "Task" [ 1276.181616] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.191546] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433536, 'name': CreateVM_Task} progress is 5%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.468540] env[68673]: DEBUG nova.network.neutron [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Updated VIF entry in instance network info cache for port 6e3f90e5-bfe9-4675-b27f-5b561aabc88d. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1276.469083] env[68673]: DEBUG nova.network.neutron [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Updating instance_info_cache with network_info: [{"id": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "address": "fa:16:3e:ab:c3:67", "network": {"id": "6b093bf4-1f0b-4bfd-9f0f-45bc5f4d2615", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1586383297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1580894c604a4189aefff7162f578876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f90e5-bf", "ovs_interfaceid": "6e3f90e5-bfe9-4675-b27f-5b561aabc88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.480009] env[68673]: DEBUG oslo_concurrency.lockutils [req-dda1973b-c289-4f31-a14e-a5a9946bacaf req-7dca7607-f2d2-41a9-812a-903ed71ac351 service nova] Releasing lock "refresh_cache-074705bc-7378-43f8-8ed3-b3b5ea38f50a" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.691739] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433536, 'name': CreateVM_Task, 'duration_secs': 0.341222} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.691900] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1276.698922] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.699105] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.699417] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1276.699656] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34fd9c03-170f-4953-b928-9bccc7a5c145 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.704050] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for the task: (returnval){ [ 1276.704050] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5248d3ca-e64f-7fa2-ed54-115156a9ffe4" [ 1276.704050] env[68673]: _type = "Task" [ 1276.704050] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.711474] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5248d3ca-e64f-7fa2-ed54-115156a9ffe4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.214245] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.214579] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1277.214755] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.314662] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.314968] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.197722] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.250686] env[68673]: DEBUG oslo_concurrency.lockutils [None req-643a3c71-1391-41e4-83da-3e7aa6142476 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Acquiring lock "e5e06bf8-a836-47a7-87b8-47a04d0b3991" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.251053] env[68673]: DEBUG oslo_concurrency.lockutils [None req-643a3c71-1391-41e4-83da-3e7aa6142476 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Lock "e5e06bf8-a836-47a7-87b8-47a04d0b3991" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.784967] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.785296] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1303.779550] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.784430] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.784430] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1306.784430] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1306.811063] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1306.811230] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1306.811363] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1306.811487] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1306.811809] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.811965] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.812102] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.812224] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.812344] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.812465] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1306.812589] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1306.813113] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1307.783787] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1308.784423] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.784102] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.795198] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1309.795512] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1309.795615] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1309.795777] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1309.796920] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7887e6e3-88a0-436d-af93-e8ffb2150951 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.805833] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8eaf338-91ab-4841-8817-c19942c2f3af {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.822426] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca328f84-8729-42fa-bb52-fd9ee55f582b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.828834] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225b8982-d09a-4153-8736-a5af71aee87c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.860078] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180922MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1309.860252] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1309.860458] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1309.936096] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.936309] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.936474] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.936646] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.936800] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.936952] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.937092] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.937225] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.937343] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.937458] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1309.949387] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1309.960865] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 42eae7a3-1757-40f6-a194-58a4eb1ee3a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1309.971615] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1309.981455] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1309.990923] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1310.000566] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance e5e06bf8-a836-47a7-87b8-47a04d0b3991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1310.000809] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1310.000955] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1310.183320] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3840abe4-2f89-4cb9-b93b-ef6e80fc16fa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.190728] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b37cb72-b32c-4ce6-b92e-fe15383fe491 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.220486] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443b9099-bf85-47d6-9ac5-c668abe236d9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.227933] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfd94f8-f0f4-4933-8ad1-353a82892e91 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.241249] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1310.249597] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1310.264165] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1310.264355] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.404s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1311.263736] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1311.264047] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1314.780054] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1322.685233] env[68673]: WARNING oslo_vmware.rw_handles [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1322.685233] env[68673]: ERROR oslo_vmware.rw_handles
[ 1322.686102] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1322.687729] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1322.688055] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Copying Virtual Disk [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/ced693d1-f4c3-4b59-b467-4e56958d2024/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1322.688359] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba3e5aa4-54b8-4577-badb-0e0f8a2098a1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1322.695442] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){
[ 1322.695442] env[68673]:     value = "task-3433537"
[ 1322.695442] env[68673]:     _type = "Task"
[ 1322.695442] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1322.703381] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1323.205811] env[68673]: DEBUG oslo_vmware.exceptions [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1323.206107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1323.206690] env[68673]: ERROR nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1323.206690] env[68673]: Faults: ['InvalidArgument']
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Traceback (most recent call last):
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     yield resources
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self.driver.spawn(context, instance, image_meta,
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self._fetch_image_if_missing(context, vi)
[ 1323.206690] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     image_cache(vi, tmp_image_ds_loc)
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     vm_util.copy_virtual_disk(
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     session._wait_for_task(vmdk_copy_task)
[ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] return self.wait_for_task(task_ref) [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] return evt.wait() [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] result = hub.switch() [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1323.207350] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] return self.greenlet.switch() [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] self.f(*self.args, **self.kw) [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] raise exceptions.translate_fault(task_info.error) [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Faults: ['InvalidArgument'] [ 1323.208072] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] [ 1323.208072] env[68673]: INFO nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Terminating instance [ 1323.208593] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.208737] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.208969] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-604a06d9-3ac7-4b59-9b94-67f8d0acc36e 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.212555] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1323.212764] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1323.213618] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817bfd2c-dda0-482c-9194-75bdb1fdbd9a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.221204] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1323.221204] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5294d3cf-c17d-40cb-bb28-f0d454c33615 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.225378] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.225378] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1323.225378] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42e91f6e-11ae-4b86-8fb5-77a13a2ce9f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.230477] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1323.230477] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5227d425-ca89-c06a-0e80-ea42e0a67171" [ 1323.230477] env[68673]: _type = "Task" [ 1323.230477] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.240730] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5227d425-ca89-c06a-0e80-ea42e0a67171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.296696] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1323.296875] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1323.297113] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleting the datastore file [datastore1] 12c03ca5-3526-4ebe-84af-b2027a6e50ac {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1323.297424] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dd7f28f-ac8e-4330-bc22-7c5a998329bb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.303665] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 1323.303665] env[68673]: value = "task-3433539" [ 1323.303665] env[68673]: _type = "Task" [ 1323.303665] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.311289] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.741110] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1323.741445] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating directory with path [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.741589] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a6cb5f1-d490-4b1e-98a5-04e885bc76f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.752315] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created directory with path [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.752499] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Fetch image to [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1323.752666] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1323.753411] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6de9cb-d8f9-4ad4-af38-eb437b33c6c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.759713] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46427ddf-c581-4f9e-99aa-b3ec731a85d0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.768410] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7391c783-5c08-4f3f-922f-f042fd334571 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.799601] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dc724d93-d53b-4782-a6de-e69ae8023276 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.807742] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c4d295b1-9ced-4f54-a341-9ce50ef1ef44 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.815020] env[68673]: DEBUG oslo_vmware.api [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065827} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.815020] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1323.815020] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1323.815020] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1323.815020] env[68673]: INFO nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1323.817016] env[68673]: DEBUG nova.compute.claims [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1323.817176] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1323.817379] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1323.830953] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1323.881991] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1323.941712] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1323.941899] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1324.085943] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be918bc2-eb75-400c-8b6b-737b51eddfa6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.093553] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b2f5ed-e804-412e-b1c3-312036946af9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.122601] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad3588c-b232-4ad8-96b1-a4b894e6c2f2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.129145] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f3a46f-b001-4482-ae64-94fbf5819430 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.142777] env[68673]: DEBUG nova.compute.provider_tree [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1324.151244] env[68673]: DEBUG nova.scheduler.client.report [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1324.167403] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.350s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1324.168049] env[68673]: ERROR nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1324.168049] env[68673]: Faults: ['InvalidArgument']
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Traceback (most recent call last):
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self.driver.spawn(context, instance, image_meta,
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self._fetch_image_if_missing(context, vi)
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     image_cache(vi, tmp_image_ds_loc)
[ 1324.168049] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     vm_util.copy_virtual_disk(
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     session._wait_for_task(vmdk_copy_task)
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     return self.wait_for_task(task_ref)
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     return evt.wait()
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     result = hub.switch()
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     return self.greenlet.switch()
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1324.168473] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     self.f(*self.args, **self.kw)
[ 1324.168929] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1324.168929] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]     raise exceptions.translate_fault(task_info.error)
[ 1324.168929] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1324.168929] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Faults: ['InvalidArgument']
[ 1324.168929] env[68673]: ERROR nova.compute.manager [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac]
[ 1324.168929] env[68673]: DEBUG nova.compute.utils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1324.170142] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Build of instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac was re-scheduled: A specified parameter was not correct: fileType
[ 1324.170142] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1324.170502] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1324.170746] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1324.170916] env[68673]: DEBUG nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1324.171089] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1324.525307] env[68673]: DEBUG nova.network.neutron [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1324.539302] env[68673]: INFO nova.compute.manager [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Took 0.37 seconds to deallocate network for instance.
[ 1324.646758] env[68673]: INFO nova.scheduler.client.report [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted allocations for instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac
[ 1324.671244] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a9130d9d-0965-4d09-b1eb-32728c4faa7e tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 544.459s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1324.672423] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 347.931s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1324.672578] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1324.672779] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1324.673058] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1324.675295] env[68673]: INFO nova.compute.manager [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Terminating instance
[ 1324.677261] env[68673]: DEBUG nova.compute.manager [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1324.677457] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1324.677951] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa82b26f-945f-40d8-83dc-4ba1cb31d011 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.686496] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d00556b-efba-4b51-968d-baa9e1550ccb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1324.698016] env[68673]: DEBUG nova.compute.manager [None req-63b49a18-1b59-4db2-84f9-e5e843d74d77 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] [instance: 8aed9c82-b082-4f49-acf7-015a7e78e452] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1324.719891] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 12c03ca5-3526-4ebe-84af-b2027a6e50ac could not be found.
[ 1324.720113] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1324.720292] env[68673]: INFO nova.compute.manager [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1324.720534] env[68673]: DEBUG oslo.service.loopingcall [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.720758] env[68673]: DEBUG nova.compute.manager [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1324.720851] env[68673]: DEBUG nova.network.neutron [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1324.723123] env[68673]: DEBUG nova.compute.manager [None req-63b49a18-1b59-4db2-84f9-e5e843d74d77 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] [instance: 8aed9c82-b082-4f49-acf7-015a7e78e452] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1324.743107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-63b49a18-1b59-4db2-84f9-e5e843d74d77 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Lock "8aed9c82-b082-4f49-acf7-015a7e78e452" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.323s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.747145] env[68673]: DEBUG nova.network.neutron [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.754569] env[68673]: INFO nova.compute.manager [-] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] Took 0.03 seconds to deallocate network for instance. [ 1324.778137] env[68673]: DEBUG nova.compute.manager [None req-dee4ce23-921f-4cfc-bc18-ff9fe90850a8 tempest-ServerTagsTestJSON-238178112 tempest-ServerTagsTestJSON-238178112-project-member] [instance: af1da6ac-13b7-4271-b917-204dfe91cced] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1324.800035] env[68673]: DEBUG nova.compute.manager [None req-dee4ce23-921f-4cfc-bc18-ff9fe90850a8 tempest-ServerTagsTestJSON-238178112 tempest-ServerTagsTestJSON-238178112-project-member] [instance: af1da6ac-13b7-4271-b917-204dfe91cced] Instance disappeared before build. 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1324.823160] env[68673]: DEBUG oslo_concurrency.lockutils [None req-dee4ce23-921f-4cfc-bc18-ff9fe90850a8 tempest-ServerTagsTestJSON-238178112 tempest-ServerTagsTestJSON-238178112-project-member] Lock "af1da6ac-13b7-4271-b917-204dfe91cced" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 222.161s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.832288] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1324.862639] env[68673]: DEBUG oslo_concurrency.lockutils [None req-262a45c5-c520-4901-95dd-1f9cf4792dcc tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.190s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.863817] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 112.740s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.863817] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 12c03ca5-3526-4ebe-84af-b2027a6e50ac] During sync_power_state the instance has a pending task (deleting). Skip.
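The 'Waiting for function ... _deallocate_network_with_retries to return.' entry above comes from oslo.service's looping-call machinery: the caller starts a looping call and blocks on its result. A minimal, self-contained sketch of that idiom, assuming oslo.service is installed; the polled function is invented for illustration:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _retry_until_done():
        # Stand-in for a retried operation such as network deallocation;
        # raising LoopingCallDone stops the loop, and its retvalue becomes
        # the result that the caller's .wait() below unblocks on.
        attempts['count'] += 1
        if attempts['count'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_retry_until_done)
    result = timer.start(interval=0.1).wait()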
[ 1324.863968] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "12c03ca5-3526-4ebe-84af-b2027a6e50ac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.881105] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.881348] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.882762] env[68673]: INFO nova.compute.claims [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1325.231237] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3976a3e7-b5f2-4437-b15f-d16a651b5098 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.238889] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c997ec2-6812-414b-a8bb-b8c81c3c7e13 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.267731] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c908136f-57ce-4ba2-ad10-a0f011162bd3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.274998] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a03a83-5c75-4844-adb0-eae58818483f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.287562] env[68673]: DEBUG nova.compute.provider_tree [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.295983] env[68673]: DEBUG nova.scheduler.client.report [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0,
'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1325.309445] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.428s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.309902] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1325.341568] env[68673]: DEBUG nova.compute.utils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1325.343040] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1325.343219] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1325.355912] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1325.421675] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1325.429593] env[68673]: DEBUG nova.policy [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a135381c6987442cad21b1f74d5a9e34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8ecabf65f524fb5bfcb60401c45db96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1325.458552] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1325.458783] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1325.458940] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.459135] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1325.459283] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.459442] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1325.459625] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 
tempest-ServersTestJSON-1463022821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1325.459782] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1325.459941] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1325.460114] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1325.460286] env[68673]: DEBUG nova.virt.hardware [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1325.461558] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c036f1b-31fb-4340-9348-328cb3266e1f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.469496] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980f7566-f3a0-4725-9587-79573f0d410d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.895441] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Successfully created port: 6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.482525] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Successfully updated port: 6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1326.496281] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.496281] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.496281] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1326.558847] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1326.708509] env[68673]: DEBUG nova.compute.manager [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Received event network-vif-plugged-6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1326.708509] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Acquiring lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.708509] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.708509] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.708728] env[68673]: DEBUG nova.compute.manager [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] No waiting events found dispatching network-vif-plugged-6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1326.708728] env[68673]: WARNING nova.compute.manager [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Received unexpected event network-vif-plugged-6d5e078c-0949-4438-b462-46bbef0c8f41 for instance with vm_state building and task_state spawning.
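The event entries above show the compute manager's per-instance event latches: a spawn normally registers the external events it expects (here network-vif-plugged), and the Neutron-driven handler pops the matching latch and signals it; when no latch was registered, the 'No waiting events found' / 'Received unexpected event' pair above is logged instead. A simplified, self-contained model of that pop-and-signal pattern; the names and structure are illustrative, not Nova's actual implementation:

    import threading

    # (instance_uuid, event_name) -> latch the spawning thread blocks on.
    _latches = {}
    _lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        # Called before starting the operation that triggers the event.
        with _lock:
            _latches[(instance_uuid, event_name)] = threading.Event()

    def pop_instance_event(instance_uuid, event_name):
        # Called from the external-event handler; the None branch mirrors
        # the 'No waiting events found dispatching ...' case above.
        with _lock:
            latch = _latches.pop((instance_uuid, event_name), None)
        if latch is None:
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            latch.set()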
[ 1326.708728] env[68673]: DEBUG nova.compute.manager [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Received event network-changed-6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1326.708728] env[68673]: DEBUG nova.compute.manager [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Refreshing instance network info cache due to event network-changed-6d5e078c-0949-4438-b462-46bbef0c8f41. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1326.708876] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Acquiring lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.793482] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Updating instance_info_cache with network_info: [{"id": "6d5e078c-0949-4438-b462-46bbef0c8f41", "address": "fa:16:3e:3f:8c:6d", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d5e078c-09", "ovs_interfaceid": "6d5e078c-0949-4438-b462-46bbef0c8f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.806193] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.806609] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance network_info: |[{"id": "6d5e078c-0949-4438-b462-46bbef0c8f41", "address": "fa:16:3e:3f:8c:6d", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d5e078c-09", "ovs_interfaceid": "6d5e078c-0949-4438-b462-46bbef0c8f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1326.806850] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Acquired lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.807049] env[68673]: DEBUG nova.network.neutron [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Refreshing network info cache for port 6d5e078c-0949-4438-b462-46bbef0c8f41 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1326.808148] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:8c:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d5e078c-0949-4438-b462-46bbef0c8f41', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1326.816734] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating folder: Project (c8ecabf65f524fb5bfcb60401c45db96). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1326.819752] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b97b25d-563a-4947-bd4e-8ad08f9dc25a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.831350] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created folder: Project (c8ecabf65f524fb5bfcb60401c45db96) in parent group-v685311. [ 1326.831535] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating folder: Instances. Parent ref: group-v685385. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1326.831763] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4340a72a-9bd2-465c-b4e4-b490a2ef8669 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.842362] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created folder: Instances in parent group-v685385. [ 1326.842599] env[68673]: DEBUG oslo.service.loopingcall [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1326.842810] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1326.843064] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1e11fdf-bcc8-46ca-be0e-c70a6c2a57d3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.863514] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1326.863514] env[68673]: value = "task-3433542" [ 1326.863514] env[68673]: _type = "Task" [ 1326.863514] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.871729] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433542, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.137596] env[68673]: DEBUG nova.network.neutron [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Updated VIF entry in instance network info cache for port 6d5e078c-0949-4438-b462-46bbef0c8f41. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1327.137938] env[68673]: DEBUG nova.network.neutron [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Updating instance_info_cache with network_info: [{"id": "6d5e078c-0949-4438-b462-46bbef0c8f41", "address": "fa:16:3e:3f:8c:6d", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d5e078c-09", "ovs_interfaceid": "6d5e078c-0949-4438-b462-46bbef0c8f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.147988] env[68673]: DEBUG oslo_concurrency.lockutils [req-c6de5efb-5185-46a9-813d-cee72f1966cc req-243f1c57-58de-4ee2-a1e7-b4abac3a57a3 service nova] Releasing lock "refresh_cache-c12e8044-6e77-44a6-866e-1036f69113a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.373460] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433542, 'name': CreateVM_Task, 'duration_secs': 0.294322} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.373704] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1327.374320] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.374481] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.374793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1327.375044] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f18e3ce4-5901-4f8c-939d-633fd81a70a0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.379127] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1327.379127] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52278b93-6be4-6e11-419d-6aa45bbbbbba" [ 1327.379127] env[68673]: _type = "Task" [ 1327.379127] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.386180] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52278b93-6be4-6e11-419d-6aa45bbbbbba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.889290] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.889571] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1327.889807] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.073844] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "218a1129-966d-4512-8b4b-222d31ceb106" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.074199] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.977945] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "c12e8044-6e77-44a6-866e-1036f69113a2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.803614] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.784197] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.784372] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1366.784706] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1366.785148] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1366.785148] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1366.810598] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.810772] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.810967] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811132] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811259] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811383] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811504] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811623] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811738] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811853] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1366.811969] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1366.812529] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.784324] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.783576] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.783868] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.796034] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.796162] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.796355] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.796575] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1369.797832] 
env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87a796b-b12f-44a8-aabf-4d26007858e6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.807603] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60171143-af1f-48c8-94ed-12b46b135fea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.821739] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff524c4-e4e5-468e-9f57-15244e2fde32 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.828046] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12383ce4-c3dd-482a-a0ab-2d6d727fc4c8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.860028] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180923MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1369.860028] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.860028] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.932402] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.932624] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f4e540de-0b46-424b-894d-8ec0416d9828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.932761] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.932886] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.933009] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.933142] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.933258] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.933372] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.933483] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.934023] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1369.946440] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 42eae7a3-1757-40f6-a194-58a4eb1ee3a4 has been deleted (perhaps locally). Deleting allocations that remained for this instance against this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1750}} [ 1369.977359] env[68673]: WARNING nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Unable to delete allocation for instance 42eae7a3-1757-40f6-a194-58a4eb1ee3a4: (409 {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n consumer generation conflict - expected null but got 1 ", "code": "placement.concurrent_update", "request_id": "req-27a94245-08eb-46d9-a639-a91cd4da6fe2"}]}) [ 1369.977607] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.118s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.978842] env[68673]: ERROR nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.AllocationDeleteFailed: Failed to delete allocations for consumer 42eae7a3-1757-40f6-a194-58a4eb1ee3a4. Error: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n consumer generation conflict - expected null but got 1 ", "code": "placement.concurrent_update", "request_id": "req-27a94245-08eb-46d9-a639-a91cd4da6fe2"}]} [ 1369.978842] env[68673]: ERROR nova.compute.manager Traceback (most recent call last): [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 10574, in _update_available_resource_for_node [ 1369.978842] env[68673]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 935, in update_available_resource [ 1369.978842] env[68673]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1369.978842] env[68673]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1045, in _update_available_resource [ 1369.978842] env[68673]: ERROR nova.compute.manager self._remove_deleted_instances_allocations( [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1757, in _remove_deleted_instances_allocations [ 1369.978842] env[68673]: ERROR nova.compute.manager self.reportclient.delete_allocation_for_instance(context, [ 1369.978842] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 120, in wrapper [ 1369.979292] env[68673]: ERROR nova.compute.manager return f(self, *a, **k) [ 1369.979292] env[68673]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 2206, in delete_allocation_for_instance [ 1369.979292] env[68673]: ERROR nova.compute.manager raise exception.AllocationDeleteFailed(consumer_uuid=uuid, [ 1369.979292] env[68673]: ERROR nova.compute.manager nova.exception.AllocationDeleteFailed: Failed to delete allocations for consumer 
42eae7a3-1757-40f6-a194-58a4eb1ee3a4. Error: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n consumer generation conflict - expected null but got 1 ", "code": "placement.concurrent_update", "request_id": "req-27a94245-08eb-46d9-a639-a91cd4da6fe2"}]} [ 1369.979292] env[68673]: ERROR nova.compute.manager [ 1370.771965] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e6111626-7de0-44a3-8fd6-9530ba5bc67b tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "c40c36ba-3be1-4be5-a28e-dd6080a0b7cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.772174] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e6111626-7de0-44a3-8fd6-9530ba5bc67b tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "c40c36ba-3be1-4be5-a28e-dd6080a0b7cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.977966] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.783789] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.885481] env[68673]: WARNING oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1371.885481] env[68673]: ERROR oslo_vmware.rw_handles [ 1371.886069] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd
tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1371.888608] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1371.888892] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Copying Virtual Disk [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/177e53e0-11e7-4830-8d35-f7944cd48743/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1371.889888] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-297b008c-d7fc-4149-8692-c014922030fc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.897984] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1371.897984] env[68673]: value = "task-3433543" [ 1371.897984] env[68673]: _type = "Task" [ 1371.897984] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.905995] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433543, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.408132] env[68673]: DEBUG oslo_vmware.exceptions [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1372.408446] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.409015] env[68673]: ERROR nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1372.409015] env[68673]: Faults: ['InvalidArgument'] [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Traceback (most recent call last): [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] yield resources [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self.driver.spawn(context, instance, image_meta, [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self._fetch_image_if_missing(context, vi) [ 1372.409015] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] image_cache(vi, tmp_image_ds_loc) [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] vm_util.copy_virtual_disk( [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] session._wait_for_task(vmdk_copy_task) [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return self.wait_for_task(task_ref) [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return evt.wait() [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] result = hub.switch() [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1372.409465] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return self.greenlet.switch() [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self.f(*self.args, **self.kw) [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] raise exceptions.translate_fault(task_info.error) [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Faults: ['InvalidArgument'] [ 1372.410075] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] [ 1372.410075] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Terminating instance [ 1372.411788] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1372.412010] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1372.412328] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.412525] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.413307] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e274da49-dab8-427c-982c-b58525d8fe37 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.416077] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd2aec96-5f8b-4d30-a705-16eaf521041f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.422558] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1372.422874] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb29205-00a9-4e0b-90cf-56000bcc51ab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.425537] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.425757] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1372.426975] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-144e9c8d-608f-4b28-8b35-8fdc602447df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.432093] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1372.432093] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527b6990-178f-d266-7bd5-67534b378c08" [ 1372.432093] env[68673]: _type = "Task" [ 1372.432093] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.439994] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527b6990-178f-d266-7bd5-67534b378c08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.497604] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1372.497852] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1372.498048] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleting the datastore file [datastore1] 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1372.498314] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72f7c4ba-030a-411c-86f4-5fd91b3c304a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.504461] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1372.504461] env[68673]: value = "task-3433545" [ 1372.504461] env[68673]: _type = "Task" [ 1372.504461] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.512397] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433545, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.942680] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1372.942680] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating directory with path [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.942680] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7def62e8-3a53-4f3e-820d-227bf2af8e28 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.955350] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Created directory with path [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.955584] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Fetch image to [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1372.955705] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1372.956704] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe619d6-41af-4415-86c7-21e3b9979307 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.965735] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8b07d4-821c-4b9c-86bd-b5a9335d5771 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.975208] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191e67b5-716e-4c5b-8951-7a6e9b1e3779 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.009707] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b803388f-bbfc-479c-8e47-fb8a45ed9a09 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.016867] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074872} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.018423] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1373.018661] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1373.018844] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1373.019054] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1373.021552] env[68673]: DEBUG nova.compute.claims [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1373.021704] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.021917] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.025023] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1641015b-bf5c-43c3-8a25-1e9c4f1a1acf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.046641] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1373.102340] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1373.191212] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1373.191212] env[68673]: DEBUG oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1373.393765] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46018e9-b6c2-42e6-94e1-3ccbe744257a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.401711] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc53633b-2650-45ea-bb32-55cbf6a1af86 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.433081] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0c4a49-896c-463c-8dfa-a9bb077aa4a4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.440471] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2b6f79-a9d0-4c6a-8c96-fc88aac69544 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.453957] env[68673]: DEBUG nova.compute.provider_tree [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.462812] env[68673]: DEBUG nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1373.479913] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.458s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.480514] env[68673]: ERROR nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1373.480514] env[68673]: Faults: ['InvalidArgument'] [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Traceback (most recent call last): [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1373.480514] env[68673]: 
ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self.driver.spawn(context, instance, image_meta, [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self._fetch_image_if_missing(context, vi) [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] image_cache(vi, tmp_image_ds_loc) [ 1373.480514] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] vm_util.copy_virtual_disk( [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] session._wait_for_task(vmdk_copy_task) [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return self.wait_for_task(task_ref) [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return evt.wait() [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] result = hub.switch() [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] return self.greenlet.switch() [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1373.481033] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] self.f(*self.args, **self.kw) [ 1373.481534] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1373.481534] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] raise exceptions.translate_fault(task_info.error) [ 1373.481534] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1373.481534] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Faults: ['InvalidArgument'] [ 1373.481534] env[68673]: ERROR nova.compute.manager [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] [ 1373.481534] env[68673]: DEBUG nova.compute.utils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1373.482960] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Build of instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 was re-scheduled: A specified parameter was not correct: fileType [ 1373.482960] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1373.483412] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1373.483610] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1373.483785] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1373.483964] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1373.846710] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.859028] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Took 0.37 seconds to deallocate network for instance. [ 1373.952425] env[68673]: INFO nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleted allocations for instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 [ 1373.972067] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 501.972s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.972897] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 304.162s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.973184] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.973440] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.973652] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.975662] env[68673]: INFO nova.compute.manager [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Terminating instance [ 1373.977610] env[68673]: DEBUG nova.compute.manager [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1373.977853] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1373.978386] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9dfdb35-5623-4054-a3e5-7bcf3c6ba93e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.984790] env[68673]: DEBUG nova.compute.manager [None req-b9f1d9b2-92a5-4382-ba41-252906d6a5ce tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 9bbfc4f2-cb23-4813-808d-ab03a97acfbd] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1373.991180] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b99d4b6-7d1c-409d-89c0-d1a10a93ee1c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.007740] env[68673]: DEBUG nova.compute.manager [None req-b9f1d9b2-92a5-4382-ba41-252906d6a5ce tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 9bbfc4f2-cb23-4813-808d-ab03a97acfbd] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1374.022343] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19 could not be found. 
[ 1374.022542] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1374.022717] env[68673]: INFO nova.compute.manager [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1374.022955] env[68673]: DEBUG oslo.service.loopingcall [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1374.025052] env[68673]: DEBUG nova.compute.manager [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1374.025157] env[68673]: DEBUG nova.network.neutron [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1374.034993] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b9f1d9b2-92a5-4382-ba41-252906d6a5ce tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "9bbfc4f2-cb23-4813-808d-ab03a97acfbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.113s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.045390] env[68673]: DEBUG nova.compute.manager [None req-5231cdf5-9940-486e-91ed-f99e4c3c08c9 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 42eae7a3-1757-40f6-a194-58a4eb1ee3a4] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1374.054819] env[68673]: DEBUG nova.network.neutron [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.077077] env[68673]: DEBUG nova.compute.manager [None req-5231cdf5-9940-486e-91ed-f99e4c3c08c9 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 42eae7a3-1757-40f6-a194-58a4eb1ee3a4] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1374.078822] env[68673]: INFO nova.compute.manager [-] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] Took 0.05 seconds to deallocate network for instance. 
[ 1374.095903] env[68673]: DEBUG oslo_concurrency.lockutils [None req-5231cdf5-9940-486e-91ed-f99e4c3c08c9 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "42eae7a3-1757-40f6-a194-58a4eb1ee3a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.442s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.104014] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1374.155464] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.155680] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.157398] env[68673]: INFO nova.compute.claims [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.166668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-66e7b12c-fb1b-4497-a5af-47543dbc92bd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.166668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 162.043s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.166862] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 04db5d3f-1a7f-49e4-be8a-ef9563ac5d19] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1374.166912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "04db5d3f-1a7f-49e4-be8a-ef9563ac5d19" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.388167] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5987110d-b11d-4d19-9a4f-c537eb3a9456 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.396023] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465111af-f92e-4438-99b5-56a351bc9789 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.424697] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590e7daa-e401-4870-beac-0863a6231310 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.431292] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6612dc8-e78b-471e-9bc4-808e9bf6104f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.444746] env[68673]: DEBUG nova.compute.provider_tree [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.453009] env[68673]: DEBUG nova.scheduler.client.report [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1374.467084] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.467539] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1374.498978] env[68673]: DEBUG nova.compute.utils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.500420] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1374.500592] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1374.508700] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1374.558615] env[68673]: DEBUG nova.policy [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f617cfb6919840ad99e1320228344b18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c73dd528430445bb8717487ffd7dd780', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1374.570536] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1374.597039] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1374.597456] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1374.597662] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1374.597867] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1374.598030] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1374.598185] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1374.598415] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1374.598597] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1374.598766] env[68673]: DEBUG 
nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1374.598930] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1374.599120] env[68673]: DEBUG nova.virt.hardware [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1374.599980] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7957fb-8c8e-4eb8-8eaf-dcf4eb94a330 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.608186] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b993d6-3292-476c-b7e5-6a961c80cd14 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.096991] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Successfully created port: dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.749591] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Successfully updated port: dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1375.760379] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.760772] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.760994] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1375.827831] env[68673]: DEBUG nova.network.neutron [None 
req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1375.895363] env[68673]: DEBUG nova.compute.manager [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Received event network-vif-plugged-dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1375.895585] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Acquiring lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.895788] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.896145] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.896432] env[68673]: DEBUG nova.compute.manager [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] No waiting events found dispatching network-vif-plugged-dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1375.896649] env[68673]: WARNING nova.compute.manager [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Received unexpected event network-vif-plugged-dab19f78-1393-44df-b3b0-0bea903ef6ac for instance with vm_state building and task_state spawning. [ 1375.896946] env[68673]: DEBUG nova.compute.manager [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Received event network-changed-dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1375.897164] env[68673]: DEBUG nova.compute.manager [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Refreshing instance network info cache due to event network-changed-dab19f78-1393-44df-b3b0-0bea903ef6ac. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1375.897377] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Acquiring lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.048287] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Updating instance_info_cache with network_info: [{"id": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "address": "fa:16:3e:a1:1e:1e", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdab19f78-13", "ovs_interfaceid": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.060490] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.060731] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance network_info: |[{"id": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "address": "fa:16:3e:a1:1e:1e", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 
701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdab19f78-13", "ovs_interfaceid": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1376.061043] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Acquired lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.061224] env[68673]: DEBUG nova.network.neutron [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Refreshing network info cache for port dab19f78-1393-44df-b3b0-0bea903ef6ac {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1376.062334] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:1e:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dab19f78-1393-44df-b3b0-0bea903ef6ac', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1376.070913] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating folder: Project (c73dd528430445bb8717487ffd7dd780). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1376.071969] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07e7735b-c287-4976-870b-d58e59b76836 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.084870] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created folder: Project (c73dd528430445bb8717487ffd7dd780) in parent group-v685311. [ 1376.085332] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating folder: Instances. Parent ref: group-v685388. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1376.085684] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-422e12c9-9b32-4c1d-9f31-3956644b8780 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.094775] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created folder: Instances in parent group-v685388. 
[ 1376.095154] env[68673]: DEBUG oslo.service.loopingcall [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1376.097014] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1376.097014] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efd771f2-643f-44ab-a449-0f89fd6ed4d5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.115181] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1376.115181] env[68673]: value = "task-3433548" [ 1376.115181] env[68673]: _type = "Task" [ 1376.115181] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.122508] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433548, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.378956] env[68673]: DEBUG nova.network.neutron [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Updated VIF entry in instance network info cache for port dab19f78-1393-44df-b3b0-0bea903ef6ac. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1376.379377] env[68673]: DEBUG nova.network.neutron [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Updating instance_info_cache with network_info: [{"id": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "address": "fa:16:3e:a1:1e:1e", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdab19f78-13", "ovs_interfaceid": "dab19f78-1393-44df-b3b0-0bea903ef6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.389381] env[68673]: DEBUG oslo_concurrency.lockutils [req-cd5f8948-ac7a-4377-bda6-3ecd3debd16f req-e6003dcf-79a9-472c-83cc-1e62458282a7 service nova] Releasing lock "refresh_cache-bb290679-267b-4dc2-8337-896d5208c6cd" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.626049] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433548, 'name': CreateVM_Task, 'duration_secs': 0.287103} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.626049] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1376.626696] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.626867] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.627265] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.627516] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e822c5-b477-4656-9b62-1733e91e26db {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.631764] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 1376.631764] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]526602e6-db32-3d58-5357-93a8d32574fa" [ 1376.631764] env[68673]: _type = "Task" [ 1376.631764] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.639622] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]526602e6-db32-3d58-5357-93a8d32574fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.141834] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.142117] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1377.142334] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.972286] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "bb290679-267b-4dc2-8337-896d5208c6cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.912024] env[68673]: WARNING oslo_vmware.rw_handles [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1418.912024] env[68673]: ERROR oslo_vmware.rw_handles [ 1418.912024] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: 
f4e540de-0b46-424b-894d-8ec0416d9828] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1418.913213] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1418.916101] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Copying Virtual Disk [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/6fe0a136-670c-4623-9ce9-220c38855515/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1418.916101] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2361a0b5-00cb-49af-8116-4091d2e614de {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.922531] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1418.922531] env[68673]: value = "task-3433549" [ 1418.922531] env[68673]: _type = "Task" [ 1418.922531] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.937011] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433549, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.432651] env[68673]: DEBUG oslo_vmware.exceptions [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1419.434037] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.434037] env[68673]: ERROR nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1419.434037] env[68673]: Faults: ['InvalidArgument'] [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Traceback (most recent call last): [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] yield resources [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self.driver.spawn(context, instance, image_meta, [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1419.434037] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self._fetch_image_if_missing(context, vi) [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] image_cache(vi, tmp_image_ds_loc) [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] vm_util.copy_virtual_disk( [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] session._wait_for_task(vmdk_copy_task) [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return self.wait_for_task(task_ref) [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return evt.wait() [ 1419.434298] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] result = hub.switch() [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return self.greenlet.switch() [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self.f(*self.args, **self.kw) [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] raise exceptions.translate_fault(task_info.error) [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Faults: ['InvalidArgument'] [ 1419.434557] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] [ 1419.434557] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Terminating instance [ 1419.435629] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.435843] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.436101] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f224e03c-f97d-46e8-b83d-cb92bc50b8bc {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.438419] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1419.438611] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1419.439339] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d527f3c-61e0-41fd-885a-b35d89a81acc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.445815] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1419.446117] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae1ce242-d86b-4b46-b795-62d109bbf797 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.448445] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.448616] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1419.449602] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0466a3c5-b73e-4220-a096-1efe1722a3a0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.454216] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Waiting for the task: (returnval){ [ 1419.454216] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520162c5-1c3a-4f7f-d0e6-6a8262870493" [ 1419.454216] env[68673]: _type = "Task" [ 1419.454216] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.465879] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520162c5-1c3a-4f7f-d0e6-6a8262870493, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.509834] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1419.510068] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1419.510257] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleting the datastore file [datastore1] f4e540de-0b46-424b-894d-8ec0416d9828 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.510511] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ed8d181-e044-4e2a-8404-7d7526fe5476 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.516426] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for the task: (returnval){ [ 1419.516426] env[68673]: value = "task-3433551" [ 1419.516426] env[68673]: _type = "Task" [ 1419.516426] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.525134] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.965115] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1419.965389] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Creating directory with path [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.965681] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8741a772-1699-4df0-b37a-7eef79c25749 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.976944] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Created directory with path [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.977213] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Fetch image to [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1419.977468] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1419.978169] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a71aeb-9b09-4ff8-bed1-210c0b0310ac {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.984907] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c51701-694e-4ae5-aaf0-db13d0611d08 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.994230] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34407796-cdc6-4e64-83ec-9fb39ab4a5f6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.028211] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ccbe74-64a3-4080-bc57-8ca20c74087e {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.036690] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-033f4367-80f4-4db3-b5bf-2c3952238e48 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.038431] env[68673]: DEBUG oslo_vmware.api [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Task: {'id': task-3433551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069477} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.038677] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.038858] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1420.039040] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1420.039226] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Took 0.60 seconds to destroy the instance on the hypervisor. 
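Every vCenter operation in this trace (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) follows the same pattern: invoke the method, get back a task reference, then poll its state until success or error, raising a translated fault on failure. That is the loop that surfaced the "A specified parameter was not correct: fileType" error above via raise exceptions.translate_fault(task_info.error). A simplified, self-contained sketch of the polling loop; fetch_task_info is a hypothetical stand-in for the real PropertyCollector read, and TaskFault stands in for oslo.vmware's translated exceptions:

    import time

    class TaskFault(Exception):
        """Stand-in for the translated fault raised on a failed task."""

    def wait_for_task(fetch_task_info, interval=0.5, timeout=300.0):
        # fetch_task_info() -> dict like
        #   {'state': 'running'|'success'|'error', 'error': str|None}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # mirrors: raise exceptions.translate_fault(task_info.error)
                raise TaskFault(info.get("error"))
            time.sleep(interval)   # the real loop logs "progress is N%"
        raise TimeoutError("task did not complete in time")

The "progress is 0%." records above are this loop logging each poll; the DeleteDatastoreFile_Task completing in 0.069477s shows the happy path.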
[ 1420.041349] env[68673]: DEBUG nova.compute.claims [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1420.041532] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.041753] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.061095] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1420.124058] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1420.188857] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1420.189065] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1420.326065] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4e2763-ce89-46f1-b013-36e5955d4e71 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.332748] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2ff323-39fa-4d5d-9bbf-a7e07771a128 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.363398] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f42273-7418-476d-aeac-a7e1bad7aaff {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.370174] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd31c6ab-1c10-479a-b8ec-f5c76a99edd1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.382950] env[68673]: DEBUG nova.compute.provider_tree [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.392997] env[68673]: DEBUG nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1420.406971] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.365s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.407518] env[68673]: ERROR nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1420.407518] env[68673]: Faults: ['InvalidArgument'] [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Traceback (most recent call last): [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1420.407518] env[68673]: 
ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self.driver.spawn(context, instance, image_meta, [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self._fetch_image_if_missing(context, vi) [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] image_cache(vi, tmp_image_ds_loc) [ 1420.407518] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] vm_util.copy_virtual_disk( [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] session._wait_for_task(vmdk_copy_task) [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return self.wait_for_task(task_ref) [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return evt.wait() [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] result = hub.switch() [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] return self.greenlet.switch() [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1420.407849] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] self.f(*self.args, **self.kw) [ 1420.408204] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1420.408204] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] raise exceptions.translate_fault(task_info.error) [ 1420.408204] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1420.408204] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Faults: ['InvalidArgument'] [ 1420.408204] env[68673]: ERROR nova.compute.manager [instance: f4e540de-0b46-424b-894d-8ec0416d9828] [ 1420.408319] env[68673]: DEBUG nova.compute.utils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1420.409633] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Build of instance f4e540de-0b46-424b-894d-8ec0416d9828 was re-scheduled: A specified parameter was not correct: fileType [ 1420.409633] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1420.409988] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1420.410201] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1420.410378] env[68673]: DEBUG nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1420.410541] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1420.677691] env[68673]: DEBUG nova.network.neutron [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.689185] env[68673]: INFO nova.compute.manager [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Took 0.28 seconds to deallocate network for instance. [ 1420.776189] env[68673]: INFO nova.scheduler.client.report [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Deleted allocations for instance f4e540de-0b46-424b-894d-8ec0416d9828 [ 1420.795873] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e42ff482-c70d-46a2-bfb2-489f014a5cdd tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 548.766s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.797044] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 350.915s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.797276] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Acquiring lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.797481] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.797645] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.799650] env[68673]: INFO nova.compute.manager [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Terminating instance [ 1420.801409] env[68673]: DEBUG nova.compute.manager [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1420.801515] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1420.801984] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f6f2279-8af7-4061-a96a-b5bc5ad7797a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.812007] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcf4882-2118-4fee-9638-b6a33acb25cf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.824307] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1420.845076] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4e540de-0b46-424b-894d-8ec0416d9828 could not be found. [ 1420.845283] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1420.845457] env[68673]: INFO nova.compute.manager [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Took 0.04 seconds to destroy the instance on the hypervisor. 
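[Editor's note] The VimFaultException above is raised by oslo.vmware's task poller, and the vSphere fault names ride along in its fault_list. A minimal sketch of driving a vSphere task through oslo.vmware and inspecting those faults follows; the endpoint, credentials, and the folder/config/pool references are hypothetical placeholders, not values from this log.

    # Sketch only: assumes a reachable vCenter and pre-built managed object
    # references. wait_for_task() polls the task and raises VimFaultException
    # on error, exactly as in the traceback above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    def create_vm(folder_ref, config_spec, res_pool_ref):
        session = vmware_api.VMwareAPISession(
            'vc.example.test', 'user', 'secret',        # hypothetical endpoint
            api_retry_count=10, task_poll_interval=0.5)
        try:
            task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=res_pool_ref)
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list holds the fault names, e.g. ['InvalidArgument'] above.
            if 'InvalidArgument' in e.fault_list:
                raise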
[ 1420.845697] env[68673]: DEBUG oslo.service.loopingcall [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1420.845926] env[68673]: DEBUG nova.compute.manager [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1420.846035] env[68673]: DEBUG nova.network.neutron [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1420.868048] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1420.868295] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1420.869710] env[68673]: INFO nova.compute.claims [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1420.872447] env[68673]: DEBUG nova.network.neutron [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1420.879604] env[68673]: INFO nova.compute.manager [-] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] Took 0.03 seconds to deallocate network for instance.
[ 1420.994700] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130dbe9e-8461-4ea0-b315-fd561ba929c6 tempest-MultipleCreateTestJSON-1292099291 tempest-MultipleCreateTestJSON-1292099291-project-member] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1420.995547] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 208.871s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1420.995739] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f4e540de-0b46-424b-894d-8ec0416d9828] During sync_power_state the instance has a pending task (deleting). Skip.
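[Editor's note] The lock choreography above ("Acquiring" / "acquired" / "released", with waited/held timings) is the oslo.concurrency pattern that serializes all resource claims on one named lock. A minimal illustration, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: every claim runs under the shared "compute_resources"
    # lock; the waited/held DEBUG lines come from lockutils' inner() wrapper.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        ...

    # Equivalent context-manager form:
    def instance_claim_cm(instance):
        with lockutils.lock('compute_resources'):
            ...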
[ 1420.995913] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "f4e540de-0b46-424b-894d-8ec0416d9828" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1421.108018] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16b708c-3bbf-4deb-947e-5f6501ceef7a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.113510] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f2c0ea-bb5a-4f6b-a943-679c22b1accc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.144366] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fa01de-f6ee-474a-9a57-ff27c3b6f252 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.151548] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e383eeea-2499-43d8-abcf-5a1bdbdb4063 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.164455] env[68673]: DEBUG nova.compute.provider_tree [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1421.173419] env[68673]: DEBUG nova.scheduler.client.report [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1421.188196] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1421.188696] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1421.222804] env[68673]: DEBUG nova.compute.utils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1421.224488] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1421.224488] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1421.234664] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1421.304020] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
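[Editor's note] A quick check of the inventory data reported above: placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation (here at most 16 VCPU per instance). A sketch using the logged numbers:

    # Reproduce the capacity implied by the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0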
[ 1421.320663] env[68673]: DEBUG nova.policy [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc9567858bc84a529e5a06060756750d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e47bd9e67584eaaa14567b4b999f37e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1421.336017] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1421.336268] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1421.336484] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1421.336607] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1421.336750] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1421.336996] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1421.337378] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1421.337575] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1421.337778] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1421.337917] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1421.338137] env[68673]: DEBUG nova.virt.hardware [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1421.339269] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1361501d-4097-40d4-8382-d1b5078b0a0c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.347440] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac23612e-b986-4f1d-8432-1de21894b083 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1421.694376] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Successfully created port: b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1422.693335] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Successfully updated port: b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1422.714178] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
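[Editor's note] The topology search logged above reduces to enumerating (sockets, cores, threads) factorizations of the flavor's vCPU count under the stated maxima. A simplified stand-alone version follows; Nova's real implementation in nova/virt/hardware.py applies additional ordering and preference rules on top of this:

    # Enumerate CPU topologies whose product covers the vCPU count exactly.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    # For the 1-vCPU m1.nano flavor above this yields exactly (1, 1, 1),
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(list(possible_topologies(1, 65536, 65536, 65536)))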
[ 1422.714533] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquired lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1422.714533] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1422.733715] env[68673]: DEBUG nova.compute.manager [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Received event network-vif-plugged-b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1422.736303] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Acquiring lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1422.736303] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1422.736303] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1422.736303] env[68673]: DEBUG nova.compute.manager [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] No waiting events found dispatching network-vif-plugged-b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1422.736402] env[68673]: WARNING nova.compute.manager [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Received unexpected event network-vif-plugged-b043abc5-a68d-42dc-8f5e-afad1eb88ff8 for instance with vm_state building and task_state spawning.
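[Editor's note] The "No waiting events found" / "Received unexpected event" pair above reflects Nova's external-event handshake: a waiter must register for an (instance, event) pair before Neutron delivers it, otherwise the event is logged and dropped. A toy model of that handshake, assuming nothing about Nova's internal classes:

    import threading

    _waiters = {}   # (instance_uuid, event_key) -> threading.Event

    def prepare_for_event(instance_uuid, event_key):
        # Called by the build path before it starts waiting.
        ev = threading.Event()
        _waiters[(instance_uuid, event_key)] = ev
        return ev

    def dispatch_event(instance_uuid, event_key):
        # Called when the external event arrives from Neutron.
        ev = _waiters.pop((instance_uuid, event_key), None)
        if ev is None:
            print('Received unexpected event %s for %s' % (event_key, instance_uuid))
        else:
            ev.set()

    # With no registered waiter, this reproduces the "unexpected event" branch:
    dispatch_event('76697868-c920-43d0-ab11-fcdff2e38dc1',
                   'network-vif-plugged-b043abc5-a68d-42dc-8f5e-afad1eb88ff8')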
[ 1422.736402] env[68673]: DEBUG nova.compute.manager [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Received event network-changed-b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1422.736537] env[68673]: DEBUG nova.compute.manager [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Refreshing instance network info cache due to event network-changed-b043abc5-a68d-42dc-8f5e-afad1eb88ff8. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1422.736927] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Acquiring lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1422.770024] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1422.997961] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Updating instance_info_cache with network_info: [{"id": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "address": "fa:16:3e:43:7e:08", "network": {"id": "5deead96-4a42-424c-9029-934d68a54c5a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-590887566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e47bd9e67584eaaa14567b4b999f37e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb043abc5-a6", "ovs_interfaceid": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1423.016728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Releasing lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1423.017121] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance network_info: |[{"id": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "address": "fa:16:3e:43:7e:08", "network": {"id": "5deead96-4a42-424c-9029-934d68a54c5a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-590887566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e47bd9e67584eaaa14567b4b999f37e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb043abc5-a6", "ovs_interfaceid": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1423.017436] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Acquired lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1423.017614] env[68673]: DEBUG nova.network.neutron [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Refreshing network info cache for port b043abc5-a68d-42dc-8f5e-afad1eb88ff8 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1423.022019] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:7e:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b043abc5-a68d-42dc-8f5e-afad1eb88ff8', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1423.028238] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Creating folder: Project (6e47bd9e67584eaaa14567b4b999f37e). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
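[Editor's note] The instance_info_cache payload above is an ordinary JSON-like structure once unwrapped; a short, self-contained sketch of extracting the fixed IPs and device name from it (the literal below is an abridged copy of the entry logged above):

    # Abridged copy of the network_info entry from the cache update above.
    network_info = [{
        "id": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8",
        "address": "fa:16:3e:43:7e:08",
        "devname": "tapb043abc5-a6",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.3",
                                          "type": "fixed"}]}]},
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"] if ip["type"] == "fixed"]
        # -> tapb043abc5-a6 fa:16:3e:43:7e:08 ['192.168.128.3']
        print(vif["devname"], vif["address"], ips)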
[ 1423.031683] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b97254e2-6528-445b-a3cf-591d4f367691 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.043680] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Created folder: Project (6e47bd9e67584eaaa14567b4b999f37e) in parent group-v685311.
[ 1423.043680] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Creating folder: Instances. Parent ref: group-v685391. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1423.044430] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5691c5fc-6aa6-4851-84dc-53fafd31929f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.054020] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Created folder: Instances in parent group-v685391.
[ 1423.054241] env[68673]: DEBUG oslo.service.loopingcall [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1423.054422] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1423.054902] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05f83a20-0d23-4f26-8fde-9c8e969fb595 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.078718] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1423.078718] env[68673]:     value = "task-3433554"
[ 1423.078718] env[68673]:     _type = "Task"
[ 1423.078718] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1423.086554] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433554, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1423.340354] env[68673]: DEBUG nova.network.neutron [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Updated VIF entry in instance network info cache for port b043abc5-a68d-42dc-8f5e-afad1eb88ff8. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1423.340354] env[68673]: DEBUG nova.network.neutron [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Updating instance_info_cache with network_info: [{"id": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "address": "fa:16:3e:43:7e:08", "network": {"id": "5deead96-4a42-424c-9029-934d68a54c5a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-590887566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e47bd9e67584eaaa14567b4b999f37e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb043abc5-a6", "ovs_interfaceid": "b043abc5-a68d-42dc-8f5e-afad1eb88ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1423.351276] env[68673]: DEBUG oslo_concurrency.lockutils [req-8043a6da-df06-417b-8b64-9992073bb15b req-10d204be-1af3-46bb-a7ee-061860a7c5b8 service nova] Releasing lock "refresh_cache-76697868-c920-43d0-ab11-fcdff2e38dc1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1423.593037] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433554, 'name': CreateVM_Task, 'duration_secs': 0.27878} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1423.593232] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1423.593900] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1423.594084] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1423.594417] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1423.594663] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8dc7444-0dd1-4e4f-8e6e-23130e242367 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.599682] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for the task: (returnval){
[ 1423.599682] env[68673]:     value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b076e7-c874-32cc-c17e-ecf56a97ee68"
[ 1423.599682] env[68673]:     _type = "Task"
[ 1423.599682] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1423.607747] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b076e7-c874-32cc-c17e-ecf56a97ee68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
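[Editor's note] The "[datastore1] devstack-image-cache_base/<image-id>" lock names above serialize the image-cache check-and-fetch across concurrent builds: only one worker probes the datastore for a cached VMDK and downloads it on a miss. Schematically, with cache_has() and fetch_image() as hypothetical stand-ins for the driver's real helpers:

    from oslo_concurrency import lockutils

    def ensure_cached(datastore, image_id):
        # Lock name mirrors the pattern seen in the log above.
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            if not cache_has(datastore, image_id):   # hypothetical helper
                fetch_image(datastore, image_id)     # hypothetical helper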
[ 1423.778933] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1423.853457] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1423.853457] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1424.110026] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1424.110305] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1424.110541] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1424.784505] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1424.784833] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1427.648057] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "601dfed1-fb7b-413a-836d-7fda61314c73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1427.648385] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1427.785652] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1427.785652] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1427.785652] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1427.811509] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.811723] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.811896] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812102] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812269] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
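[Editor's note] The "Running periodic task ..." lines are emitted by oslo.service's periodic task runner. A minimal declaration of such a task looks like this; the 60-second spacing is an arbitrary assumption, since Nova's real intervals are configuration-driven:

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Instances still building are skipped, as the log shows.
            pass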
[ 1427.812441] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812594] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812748] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812877] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.812996] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1427.813154] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1427.813996] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1428.783196] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1429.784267] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1430.783517] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1430.797033] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1430.797323] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1430.797462] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1430.797621] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1430.798784] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c343278-f551-4c3b-99b2-bc60223fab16 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1430.808595] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70021a05-a5f7-43ff-8cf3-7174d773b32e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1430.823151] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370e0310-73b0-427d-817e-7567872c4683 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1430.829608] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3806610a-0c0e-48f2-a436-d13938b1150a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1430.860939] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180882MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1430.861117] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1430.861321] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1430.967370] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967370] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967370] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967370] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967783] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967783] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967783] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967783] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967897] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.967897] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1430.980767] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1430.994200] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance e5e06bf8-a836-47a7-87b8-47a04d0b3991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1431.004963] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1431.021061] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c40c36ba-3be1-4be5-a28e-dd6080a0b7cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1431.037280] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1431.049131] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1431.049972] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1431.049972] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1431.293805] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc3df55-11e5-40ac-924e-329cec900013 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1431.302359] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a78bc7-66a3-43a8-959c-252b1b65d73c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1431.335211] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe8ef84-4e88-41fb-8994-16f08a63c142 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1431.342622] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359d414c-e3ee-4bea-a708-ff2408026570 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1431.355514] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1431.364900] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1431.381727] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1431.381727] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.520s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1431.934225] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1431.934668] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1433.381604] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1433.783591] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1434.779585] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.931476] env[68673]: WARNING oslo_vmware.rw_handles [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1468.931476] env[68673]: ERROR oslo_vmware.rw_handles [ 1468.932085] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1468.933758] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1468.934014] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Copying Virtual Disk [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/08628e4c-bc2b-4600-af34-8892e3565f77/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1468.934319] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5d63dc2-a108-44a0-a763-d2334123643d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.941818] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 
tempest-ServersTestJSON-1580787357-project-member] Waiting for the task: (returnval){ [ 1468.941818] env[68673]: value = "task-3433555" [ 1468.941818] env[68673]: _type = "Task" [ 1468.941818] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.949882] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Task: {'id': task-3433555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.453478] env[68673]: DEBUG oslo_vmware.exceptions [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1469.453730] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.454293] env[68673]: ERROR nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1469.454293] env[68673]: Faults: ['InvalidArgument'] [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Traceback (most recent call last): [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] yield resources [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self.driver.spawn(context, instance, image_meta, [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self._fetch_image_if_missing(context, vi) [ 1469.454293] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1469.454701] env[68673]: ERROR nova.compute.manager 
[instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] image_cache(vi, tmp_image_ds_loc) [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] vm_util.copy_virtual_disk( [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] session._wait_for_task(vmdk_copy_task) [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return self.wait_for_task(task_ref) [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return evt.wait() [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] result = hub.switch() [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1469.454701] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return self.greenlet.switch() [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self.f(*self.args, **self.kw) [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] raise exceptions.translate_fault(task_info.error) [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Faults: ['InvalidArgument'] [ 1469.455222] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] [ 1469.455222] env[68673]: INFO nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Terminating instance [ 1469.456226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 
tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.456430] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.456656] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91731aa-6abe-4eb3-8236-532a0f1ef36a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.458794] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1469.459007] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1469.459714] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92942b7-19f3-4654-b5ab-ab34abdb790e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.466346] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1469.466558] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ba20e3c-f9c7-4977-baa9-01d4405b2df4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.468665] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.468833] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1469.469771] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ef40430-b1c7-491f-859b-bdb1f4941454 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.474253] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){ [ 1469.474253] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5247e3c1-49de-b38c-92b9-ea873b9cd27d" [ 1469.474253] env[68673]: _type = "Task" [ 1469.474253] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.486714] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5247e3c1-49de-b38c-92b9-ea873b9cd27d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.538056] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1469.538056] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1469.538306] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Deleting the datastore file [datastore1] 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1469.538524] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97132e7e-e35d-40f8-b792-e06480549d83 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.543910] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Waiting for the task: (returnval){ [ 1469.543910] env[68673]: value = "task-3433557" [ 1469.543910] env[68673]: _type = "Task" [ 1469.543910] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.551161] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Task: {'id': task-3433557, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.985621] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1469.985621] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating directory with path [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.985880] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe67785d-ff0f-40e8-9506-285aef3ad53f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.997903] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created directory with path [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.998179] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Fetch image to [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1469.998363] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1469.999191] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459e2ec3-daa8-40ff-a215-3f98c4891c02 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.005858] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfa5dfa-f47a-4987-bf16-393feac4fdf5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.014939] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ab3524-cb05-46b7-9024-70917310f5fd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.049267] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ac51fe62-2581-4fc6-be4b-1293f013331b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.056724] env[68673]: DEBUG oslo_vmware.api [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Task: {'id': task-3433557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073376} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.058261] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1470.058483] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1470.058631] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1470.058803] env[68673]: INFO nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1470.060650] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-603e85e5-9e56-4d1b-8535-1d1976d46da6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.062583] env[68673]: DEBUG nova.compute.claims [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1470.062778] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.062967] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.088094] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1470.150106] env[68673]: DEBUG oslo_vmware.rw_handles [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1470.210801] env[68673]: DEBUG oslo_vmware.rw_handles [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1470.211013] env[68673]: DEBUG oslo_vmware.rw_handles [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1470.370915] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27fabe1-43b9-4270-ab99-2ed9913c94a4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.378231] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f9b395-ed73-4834-a48b-c53b3c79baf8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.406865] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5863378-7f05-491e-803c-9ea3a8c347a8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.413684] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16de9d36-62ca-4973-b500-f39d9490b246 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.426819] env[68673]: DEBUG nova.compute.provider_tree [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.435390] env[68673]: DEBUG nova.scheduler.client.report [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1470.452103] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.389s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.452648] env[68673]: ERROR nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1470.452648] env[68673]: Faults: ['InvalidArgument'] [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Traceback (most recent call last): [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 
4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self.driver.spawn(context, instance, image_meta, [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self._fetch_image_if_missing(context, vi) [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] image_cache(vi, tmp_image_ds_loc) [ 1470.452648] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] vm_util.copy_virtual_disk( [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] session._wait_for_task(vmdk_copy_task) [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return self.wait_for_task(task_ref) [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return evt.wait() [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] result = hub.switch() [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] return self.greenlet.switch() [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1470.452940] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] self.f(*self.args, **self.kw) [ 1470.453231] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1470.453231] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] raise exceptions.translate_fault(task_info.error) [ 1470.453231] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1470.453231] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Faults: ['InvalidArgument'] [ 1470.453231] env[68673]: ERROR nova.compute.manager [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] [ 1470.453346] env[68673]: DEBUG nova.compute.utils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1470.454818] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Build of instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 was re-scheduled: A specified parameter was not correct: fileType [ 1470.454818] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1470.455197] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1470.455373] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1470.455539] env[68673]: DEBUG nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1470.455699] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1470.765386] env[68673]: DEBUG nova.network.neutron [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.776195] env[68673]: INFO nova.compute.manager [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Took 0.32 seconds to deallocate network for instance. [ 1470.869206] env[68673]: INFO nova.scheduler.client.report [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Deleted allocations for instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 [ 1470.889029] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7379949e-0a71-490c-bd8f-eec915e16a4f tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 594.895s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.890160] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 398.277s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.890427] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Acquiring lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.890653] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1470.890831] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.892814] env[68673]: INFO nova.compute.manager [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Terminating instance [ 1470.894485] env[68673]: DEBUG nova.compute.manager [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1470.894677] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1470.895152] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b92ad7b-2251-422c-938e-16290cc6a546 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.905974] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f8da05-e4bb-4050-b1b9-72032d3bcc11 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.916567] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1470.937255] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4 could not be found. [ 1470.937457] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1470.937634] env[68673]: INFO nova.compute.manager [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1470.937874] env[68673]: DEBUG oslo.service.loopingcall [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.938180] env[68673]: DEBUG nova.compute.manager [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1470.938243] env[68673]: DEBUG nova.network.neutron [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1470.961862] env[68673]: DEBUG nova.network.neutron [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.964124] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.964366] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.965847] env[68673]: INFO nova.compute.claims [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1470.970652] env[68673]: INFO nova.compute.manager [-] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] Took 0.03 seconds to deallocate network for instance. [ 1471.064023] env[68673]: DEBUG oslo_concurrency.lockutils [None req-df9a92c4-fa93-416a-a471-749df8036f04 tempest-ServersTestJSON-1580787357 tempest-ServersTestJSON-1580787357-project-member] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.065694] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 258.941s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.065902] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1471.066097] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "4ab0ab5d-f1ee-467f-8c9a-0ab3b41a4fc4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.217285] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dff209-c162-44e0-bb92-417317137c93 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.225551] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14d07b6-3b1e-4f30-a522-184c2ba5fddc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.257872] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b80503-7877-4727-97b3-08694795038d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.265286] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b40cd1f-4293-4a7d-9cb9-0bd28f398a63 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.278823] env[68673]: DEBUG nova.compute.provider_tree [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.289496] env[68673]: DEBUG nova.scheduler.client.report [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1471.304105] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.304589] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1471.340111] env[68673]: DEBUG nova.compute.utils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1471.341568] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1471.341743] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1471.351016] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1471.416693] env[68673]: DEBUG nova.policy [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fa21131a12b499da25d3b560cf45dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '703b63ef1a6049a389f26eead64ab4d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1471.420260] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1471.444669] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=<?>,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-15T11:24:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1471.444916] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1471.445087] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1471.445277] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1471.445422] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1471.445566] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1471.445769] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1471.445926] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1471.446136] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1471.446311] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1471.446647] env[68673]: DEBUG nova.virt.hardware [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1471.447535] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06566b6d-4011-4090-813c-14ccf4194931 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.457720] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042cdaf4-83f2-4d52-8ef8-532185e7343b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.730884] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Successfully created port: c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1472.411638] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Successfully updated port: c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1472.434742] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1472.434881] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1472.435042] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1472.490084] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1472.694207] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Updating instance_info_cache with network_info: [{"id": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "address": "fa:16:3e:63:71:58", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f21960-8d", "ovs_interfaceid": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1472.706124] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1472.706414] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance network_info: |[{"id": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "address": "fa:16:3e:63:71:58", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f21960-8d", "ovs_interfaceid": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1472.706814] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:71:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2f21960-8d5c-4414-8fdc-bf1af3c86def', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1472.714670] env[68673]: DEBUG oslo.service.loopingcall [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1472.715111] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1472.715339] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c68dbcd6-84d3-48b9-8b72-9b75c61353da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1472.735598] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1472.735598] env[68673]: value = "task-3433558"
[ 1472.735598] env[68673]: _type = "Task"
[ 1472.735598] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1472.743073] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433558, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1472.800713] env[68673]: DEBUG nova.compute.manager [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Received event network-vif-plugged-c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1472.800896] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Acquiring lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1472.801127] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1472.801300] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1472.801468] env[68673]: DEBUG nova.compute.manager [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] No waiting events found dispatching network-vif-plugged-c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1472.801658] env[68673]: WARNING nova.compute.manager [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Received unexpected event network-vif-plugged-c2f21960-8d5c-4414-8fdc-bf1af3c86def for instance with vm_state building and task_state spawning.
[ 1472.801832] env[68673]: DEBUG nova.compute.manager [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Received event network-changed-c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1472.802075] env[68673]: DEBUG nova.compute.manager [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Refreshing instance network info cache due to event network-changed-c2f21960-8d5c-4414-8fdc-bf1af3c86def. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1472.802268] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Acquiring lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1472.802412] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Acquired lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1472.802661] env[68673]: DEBUG nova.network.neutron [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Refreshing network info cache for port c2f21960-8d5c-4414-8fdc-bf1af3c86def {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1473.060500] env[68673]: DEBUG nova.network.neutron [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Updated VIF entry in instance network info cache for port c2f21960-8d5c-4414-8fdc-bf1af3c86def. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1473.060862] env[68673]: DEBUG nova.network.neutron [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Updating instance_info_cache with network_info: [{"id": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "address": "fa:16:3e:63:71:58", "network": {"id": "c73ba772-d126-4ca7-b2f8-5f3fb067da0c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1997758298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "703b63ef1a6049a389f26eead64ab4d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f21960-8d", "ovs_interfaceid": "c2f21960-8d5c-4414-8fdc-bf1af3c86def", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1473.070304] env[68673]: DEBUG oslo_concurrency.lockutils [req-32ffb465-6c32-4a92-9c69-30fcfd92b0b9 req-ba6cfa90-75f2-4c60-91b4-5b9b83f3dd96 service nova] Releasing lock "refresh_cache-f1f37193-2ef8-43aa-bde4-98399ce3f955" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1473.245430] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433558, 'name': CreateVM_Task, 'duration_secs': 0.284926} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1473.245606] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1473.246302] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1473.246465] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1473.246767] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1473.247008] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5c92078-4b4c-4ebb-b298-53c566d54adc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1473.251313] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){
[ 1473.251313] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522500dd-6bf1-457c-2972-d5894487b6bb"
[ 1473.251313] env[68673]: _type = "Task"
[ 1473.251313] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1473.258516] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522500dd-6bf1-457c-2972-d5894487b6bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1473.762240] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1473.762533] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1473.762658] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1475.913225] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1485.803639] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1486.783670] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1486.783849] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1487.784800] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1487.785210] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1487.785210] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1487.805444] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.805584] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.805717] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.805844] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.805967] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806103] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806226] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806412] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806532] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806651] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1487.806767] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1487.807294] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1490.783181] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1491.783918] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1491.784241] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1491.810154] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1491.810154] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1491.810154] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1491.810154] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1491.810780] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd01455-e4f0-4694-b35f-a7b1e36fa375 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1491.819971] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131d89bb-a716-47c0-a53b-a036443d423c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1491.836747] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2c1d32-a577-4b57-9951-1d6792929062 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1491.848976] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52567450-7068-4b53-a89a-42438274a0a4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1491.883850] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180917MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1491.884030] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1491.884236] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1492.041703] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.041867] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.041998] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.042141] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.042262] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.043043] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.043043] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.043043] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.043043] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.043218] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1492.055249] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1492.067434] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c40c36ba-3be1-4be5-a28e-dd6080a0b7cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1492.079063] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1492.089267] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1492.100558] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1492.100779] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1492.100930] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1492.118799] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1492.137042] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1492.137240] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1492.152465] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1492.185726] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1492.402779] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc92b97b-ce8d-4b5c-b351-46d3c17d7c5a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1492.411578] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60a2413-523b-40da-b07f-f3bf3a5ec205 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1492.446317] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b67907-b59a-4fa0-a57a-18983dde4eee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1492.455238] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da43f77f-16cb-44c1-9e44-c5a7fab0a7c0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1492.476614] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1492.490479] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1492.511939] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1492.511939] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.627s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1493.511155] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1495.784381] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1495.784687] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1496.959069] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "3352e87c-38dd-4bfa-937c-644abc30cf76" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1496.959437] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1502.793063] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1502.793063] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}}
[ 1504.795599] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1504.795834] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}}
[ 1504.806360] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1516.937403] env[68673]: WARNING oslo_vmware.rw_handles [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles response.begin()
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1516.937403] env[68673]: ERROR oslo_vmware.rw_handles
[ 1516.938027] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1516.939902] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1516.940217] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Copying Virtual Disk [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/ec465b26-3104-4a6c-87d9-3f86bf866768/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1516.940520] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-670307ae-675e-4019-9518-72a6f2719dd7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1516.948445] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){
[ 1516.948445] env[68673]: value = "task-3433559"
[ 1516.948445] env[68673]: _type = "Task"
[ 1516.948445] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1516.956269] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433559, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1517.460468] env[68673]: DEBUG oslo_vmware.exceptions [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1517.460753] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1517.461316] env[68673]: ERROR nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1517.461316] env[68673]: Faults: ['InvalidArgument']
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Traceback (most recent call last):
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] yield resources
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self.driver.spawn(context, instance, image_meta,
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self._fetch_image_if_missing(context, vi)
[ 1517.461316] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] image_cache(vi, tmp_image_ds_loc)
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] vm_util.copy_virtual_disk(
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] session._wait_for_task(vmdk_copy_task)
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return self.wait_for_task(task_ref)
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return evt.wait()
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] result = hub.switch()
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1517.461614] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return self.greenlet.switch()
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self.f(*self.args, **self.kw)
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] raise exceptions.translate_fault(task_info.error)
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Faults: ['InvalidArgument']
[ 1517.462184] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695]
[ 1517.462184] env[68673]: INFO nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Terminating instance
[ 1517.463210] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1517.463429] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1517.463664] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3855f734-f05b-4719-add1-a08b99a3bb79 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1517.465793] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1517.465979] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1517.466685] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0872bd-9d6e-45d5-8500-ec33f3ce213a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1517.473714] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1517.473962] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f93c177-cee4-44d2-a789-62e43b1370de {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1517.476144] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1517.476327] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1517.477298] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fcfe0d8-ceb9-4f1f-920f-ec3fccb3dd5d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1517.481917] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for the task: (returnval){
[ 1517.481917] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a66f9e-2aae-405f-6374-c3e875413c5e"
[ 1517.481917] env[68673]: _type = "Task"
[ 1517.481917] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1517.491166] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a66f9e-2aae-405f-6374-c3e875413c5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1517.542596] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1517.542596] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1517.542787] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleting the datastore file [datastore1] 94d40e8f-639a-4695-8d3d-1b0d81e29695 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1517.543052] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1566188d-4b83-4278-9c19-6cae3c099146 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1517.549411] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){
[ 1517.549411] env[68673]: value = "task-3433561"
[ 1517.549411] env[68673]: _type = "Task"
[ 1517.549411] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1517.556742] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433561, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.992526] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1517.992788] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Creating directory with path [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.993039] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eafc0ab4-af35-42b8-9283-a56ec59642d6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.004327] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Created directory with path [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.004531] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Fetch image to [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1518.004718] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1518.005460] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8671673-4f1e-47e4-9e86-f25e60ed3c29 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.012183] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6b419b-35af-4b78-b95b-59b2173211e5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.021667] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdb9122-9bca-4be6-adb2-a55d254cc861 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.054100] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5cc4259d-f273-44b0-b9c4-6ec3a9636be5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.061207] env[68673]: DEBUG oslo_vmware.api [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065006} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.062581] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1518.062773] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1518.062942] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1518.063126] env[68673]: INFO nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Took 0.60 seconds to destroy the instance on the hypervisor. 
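The DeleteDatastoreFile_Task round-trip above (submit the task, then poll it to completion) is the same oslo.vmware pattern whose failure mode produced the copy_virtual_disk traceback earlier. A minimal self-contained sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and 'datacenter-2' moref are placeholders, not values from this log:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials; oslo.vmware opens the session on init.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Submit the datastore file deletion, then block while oslo.vmware polls
    # TaskInfo; a task error is raised as a translated VimFaultException, the
    # same exception type seen in the spawn traceback above.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # illustrative moref
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] 94d40e8f-639a-4695-8d3d-1b0d81e29695',
        datacenter=dc_ref)
    session.wait_for_task(task)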
[ 1518.065202] env[68673]: DEBUG nova.compute.claims [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1518.065377] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.065603] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.068249] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecf3883d-2392-49fe-83e3-6409535d1c79 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.088970] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1518.142810] env[68673]: DEBUG oslo_vmware.rw_handles [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1518.203938] env[68673]: DEBUG oslo_vmware.rw_handles [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1518.203938] env[68673]: DEBUG oslo_vmware.rw_handles [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1518.342267] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5f72fd-e6be-4c11-a07f-29891b02df12 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.349889] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66d435f-0666-4d70-a603-7e5cf66c0d86 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.379347] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c56013-c063-4a22-af6f-34ab0c3a6e84 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.394361] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8834b9-7a6c-4215-b3f8-4d10b0d2d80e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.407537] env[68673]: DEBUG nova.compute.provider_tree [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.415838] env[68673]: DEBUG nova.scheduler.client.report [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1518.432261] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.367s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.432788] env[68673]: ERROR nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1518.432788] env[68673]: Faults: ['InvalidArgument'] [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Traceback (most recent call last): [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance 
[ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self.driver.spawn(context, instance, image_meta, [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self._fetch_image_if_missing(context, vi) [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] image_cache(vi, tmp_image_ds_loc) [ 1518.432788] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] vm_util.copy_virtual_disk( [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] session._wait_for_task(vmdk_copy_task) [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return self.wait_for_task(task_ref) [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return evt.wait() [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] result = hub.switch() [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] return self.greenlet.switch() [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1518.433132] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] self.f(*self.args, **self.kw) [ 1518.433467] env[68673]: ERROR nova.compute.manager [instance: 
94d40e8f-639a-4695-8d3d-1b0d81e29695] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1518.433467] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] raise exceptions.translate_fault(task_info.error) [ 1518.433467] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1518.433467] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Faults: ['InvalidArgument'] [ 1518.433467] env[68673]: ERROR nova.compute.manager [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] [ 1518.433610] env[68673]: DEBUG nova.compute.utils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1518.434867] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Build of instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 was re-scheduled: A specified parameter was not correct: fileType [ 1518.434867] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1518.435247] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1518.435423] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1518.435590] env[68673]: DEBUG nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1518.435753] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1518.825515] env[68673]: DEBUG nova.network.neutron [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.838532] env[68673]: INFO nova.compute.manager [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Took 0.40 seconds to deallocate network for instance. [ 1518.942791] env[68673]: INFO nova.scheduler.client.report [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleted allocations for instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 [ 1518.966306] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c8de2592-2fc0-4629-b9be-7cd33cb60760 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 636.010s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.967535] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.919s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.967777] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.967981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.968160] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.970201] env[68673]: INFO nova.compute.manager [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Terminating instance [ 1518.971946] env[68673]: DEBUG nova.compute.manager [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1518.972158] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1518.972671] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf4e8eb6-d35c-4a7e-8dde-790aa284fce1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.977227] env[68673]: DEBUG nova.compute.manager [None req-643a3c71-1391-41e4-83da-3e7aa6142476 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] [instance: e5e06bf8-a836-47a7-87b8-47a04d0b3991] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1518.983797] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc2eeb5-833e-4e82-bc38-b416f6687e53 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.001633] env[68673]: DEBUG nova.compute.manager [None req-643a3c71-1391-41e4-83da-3e7aa6142476 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] [instance: e5e06bf8-a836-47a7-87b8-47a04d0b3991] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1519.012313] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94d40e8f-639a-4695-8d3d-1b0d81e29695 could not be found. 
[ 1519.012503] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1519.012675] env[68673]: INFO nova.compute.manager [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1519.012906] env[68673]: DEBUG oslo.service.loopingcall [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1519.013164] env[68673]: DEBUG nova.compute.manager [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1519.013287] env[68673]: DEBUG nova.network.neutron [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1519.034193] env[68673]: DEBUG oslo_concurrency.lockutils [None req-643a3c71-1391-41e4-83da-3e7aa6142476 tempest-ServersTestMultiNic-380839840 tempest-ServersTestMultiNic-380839840-project-member] Lock "e5e06bf8-a836-47a7-87b8-47a04d0b3991" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.783s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1519.038417] env[68673]: DEBUG nova.network.neutron [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1519.044276] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1519.047348] env[68673]: INFO nova.compute.manager [-] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] Took 0.03 seconds to deallocate network for instance.
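The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call helper blocking the caller while the retried function runs. A self-contained sketch of that shape; the retry body is a stand-in, not Nova's:

    from oslo_service import loopingcall

    state = {'attempts': 0}

    def _deallocate_with_retries():
        state['attempts'] += 1
        if state['attempts'] >= 3:               # pretend the third try succeeds
            raise loopingcall.LoopingCallDone()  # ends the loop, unblocks wait()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=0.1).wait()             # blocks like the log line above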
[ 1519.099116] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1519.099449] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1519.100899] env[68673]: INFO nova.compute.claims [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1519.141811] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f24cf028-67b3-496c-b89a-4655e379250f tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1519.142652] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 307.018s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1519.142967] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 94d40e8f-639a-4695-8d3d-1b0d81e29695] During sync_power_state the instance has a pending task (deleting). Skip.
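"Claim successful" means the requested resources fit within the provider's capacity, which Placement derives from the inventory dicts logged for this provider elsewhere in this run (and again just below) as (total - reserved) * allocation_ratio per resource class. With the logged values:

    # Inventory values copied from the report lines for provider
    # fd6b1504-0fb7-49fe-8051-ab853a390b4e in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0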
[ 1519.143221] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "94d40e8f-639a-4695-8d3d-1b0d81e29695" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.315368] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a2ead2-f706-4e77-bd5f-154763e22507 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.322697] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c157b7-4c73-4a5b-a054-1971261a633f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.352796] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4507903-02d2-46c7-8f2d-4080471496a9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.359562] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d361a4-9f7b-4814-96a6-e2a7314b1a50 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.372582] env[68673]: DEBUG nova.compute.provider_tree [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1519.381030] env[68673]: DEBUG nova.scheduler.client.report [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1519.394941] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.395461] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1519.429604] env[68673]: DEBUG nova.compute.utils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.431634] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1519.431634] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1519.441170] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1519.494520] env[68673]: DEBUG nova.policy [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fba09298253c44a780f09554de2d9083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd30e8399f1c2459d8e34b59ca25fec89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1519.501031] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1519.525725] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1519.525963] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1519.526133] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1519.526313] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1519.526462] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1519.526609] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1519.526810] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1519.526965] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1519.527145] env[68673]: DEBUG 
nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1519.527332] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1519.527519] env[68673]: DEBUG nova.virt.hardware [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1519.528389] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b6784a-ced4-4717-97c4-b3c234ce434d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.535969] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4ccf94-f2ab-4013-9922-ac54503093ef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.883056] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Successfully created port: 09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1520.731169] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Successfully updated port: 09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.743173] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.746238] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquired lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.746423] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1520.796528] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 
tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1520.864642] env[68673]: DEBUG nova.compute.manager [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Received event network-vif-plugged-09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1520.864851] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Acquiring lock "218a1129-966d-4512-8b4b-222d31ceb106-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.865057] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Lock "218a1129-966d-4512-8b4b-222d31ceb106-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.865220] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Lock "218a1129-966d-4512-8b4b-222d31ceb106-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.865390] env[68673]: DEBUG nova.compute.manager [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] No waiting events found dispatching network-vif-plugged-09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1520.865546] env[68673]: WARNING nova.compute.manager [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Received unexpected event network-vif-plugged-09bff085-0c5a-4f4c-920c-49eec1ceb86a for instance with vm_state building and task_state spawning. [ 1520.865697] env[68673]: DEBUG nova.compute.manager [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Received event network-changed-09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1520.865842] env[68673]: DEBUG nova.compute.manager [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Refreshing instance network info cache due to event network-changed-09bff085-0c5a-4f4c-920c-49eec1ceb86a. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1520.865998] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Acquiring lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.020914] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Updating instance_info_cache with network_info: [{"id": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "address": "fa:16:3e:51:d9:54", "network": {"id": "94634c70-d71b-4004-979f-d6a828a25bbd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-968673185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d30e8399f1c2459d8e34b59ca25fec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bff085-0c", "ovs_interfaceid": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.032711] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Releasing lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.032999] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance network_info: |[{"id": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "address": "fa:16:3e:51:d9:54", "network": {"id": "94634c70-d71b-4004-979f-d6a828a25bbd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-968673185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d30e8399f1c2459d8e34b59ca25fec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bff085-0c", "ovs_interfaceid": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1521.033309] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Acquired lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.033492] env[68673]: DEBUG nova.network.neutron [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Refreshing network info cache for port 09bff085-0c5a-4f4c-920c-49eec1ceb86a {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1521.034530] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:d9:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09bff085-0c5a-4f4c-920c-49eec1ceb86a', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1521.042442] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Creating folder: Project (d30e8399f1c2459d8e34b59ca25fec89). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1521.045324] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95163719-e74b-40d6-8754-c0ba1bc30c72 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.055633] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Created folder: Project (d30e8399f1c2459d8e34b59ca25fec89) in parent group-v685311. [ 1521.055840] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Creating folder: Instances. Parent ref: group-v685395. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1521.056081] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8482a76-547d-4134-aad5-a907faf66bdc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.065514] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Created folder: Instances in parent group-v685395. 
[ 1521.065734] env[68673]: DEBUG oslo.service.loopingcall [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.065916] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1521.066127] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d50760d-9ef1-4685-bb17-f28057669b34 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.085967] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1521.085967] env[68673]: value = "task-3433564" [ 1521.085967] env[68673]: _type = "Task" [ 1521.085967] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.093094] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433564, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.301492] env[68673]: DEBUG nova.network.neutron [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Updated VIF entry in instance network info cache for port 09bff085-0c5a-4f4c-920c-49eec1ceb86a. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1521.302040] env[68673]: DEBUG nova.network.neutron [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Updating instance_info_cache with network_info: [{"id": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "address": "fa:16:3e:51:d9:54", "network": {"id": "94634c70-d71b-4004-979f-d6a828a25bbd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-968673185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d30e8399f1c2459d8e34b59ca25fec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bff085-0c", "ovs_interfaceid": "09bff085-0c5a-4f4c-920c-49eec1ceb86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.311964] env[68673]: DEBUG oslo_concurrency.lockutils [req-0a7a8a78-2fa1-4c83-8d77-fe1e94f34368 req-63472dfb-1404-4a3a-860a-c0897ce3954d service nova] Releasing lock "refresh_cache-218a1129-966d-4512-8b4b-222d31ceb106" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.596400] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433564, 'name': CreateVM_Task, 'duration_secs': 0.272251} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.596596] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1521.597257] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.597420] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.597733] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.597975] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce764b6e-8efe-4268-ba22-a28a2f98c06d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.602293] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for the task: (returnval){ [ 1521.602293] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52562fd3-dc85-da42-59e8-2595a321d17b" [ 1521.602293] env[68673]: _type = "Task" [ 1521.602293] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.609624] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52562fd3-dc85-da42-59e8-2595a321d17b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.112737] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.112994] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1522.113215] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.277456] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "218a1129-966d-4512-8b4b-222d31ceb106" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.530595] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "d79f254c-7c00-4cf8-85ac-6db513533da3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.530595] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.789380] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.789758] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.789890] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1547.784473] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1548.784303] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1548.784594] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1548.784594] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1548.806552] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.806700] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.806834] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.806958] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807095] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807218] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807335] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807451] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807563] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807675] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1548.807789] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1551.783956] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.783305] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.783121] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.783383] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.798532] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.798744] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.798909] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.799075] env[68673]: 
DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1553.800183] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a900fb92-dffc-441b-83bd-24b586d159c9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.809423] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cdc567-2de5-4ed3-b39b-88ca0500332d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.823734] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21171b97-8aed-47e7-8c4c-bb6473350951 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.830537] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe80d12c-3ef5-4d85-844d-42ae19adf9cb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.859194] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180880MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1553.859357] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.859620] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.934466] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.934635] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.934762] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.934900] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.935017] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.935908] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.935908] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.935908] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.935908] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.936096] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1553.947183] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c40c36ba-3be1-4be5-a28e-dd6080a0b7cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1553.958753] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1553.969300] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1553.980387] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1553.990645] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.003144] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.003593] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1554.003593] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1554.186157] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddce7d6-e22e-448e-9d27-b9b535dc9b1d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.193714] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d20362-343c-487d-b614-ee55c3c62e90 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.223623] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bafd16-cbed-483c-9712-e8298b1b36e4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.230446] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2098b9b-2f93-4e75-904e-954a1706aace {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.242976] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.251243] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1554.264200] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1554.264381] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.405s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.260549] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.783916] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.489304] env[68673]: WARNING oslo_vmware.rw_handles [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.489304] env[68673]: ERROR oslo_vmware.rw_handles [ 1566.489840] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1566.491786] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1566.492016] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Copying Virtual Disk [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/98596780-51f5-446f-801f-3ba9b5d46e3f/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1566.492294] env[68673]: 
DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee4ceecf-0a43-4c0e-85d3-c99191a1624b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.500457] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for the task: (returnval){ [ 1566.500457] env[68673]: value = "task-3433565" [ 1566.500457] env[68673]: _type = "Task" [ 1566.500457] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.509666] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Task: {'id': task-3433565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.010088] env[68673]: DEBUG oslo_vmware.exceptions [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1567.010428] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.010996] env[68673]: ERROR nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.010996] env[68673]: Faults: ['InvalidArgument'] [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Traceback (most recent call last): [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] yield resources [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self.driver.spawn(context, instance, image_meta, [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self._fetch_image_if_missing(context, vi) [ 1567.010996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] image_cache(vi, tmp_image_ds_loc) [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] vm_util.copy_virtual_disk( [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] session._wait_for_task(vmdk_copy_task) [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return self.wait_for_task(task_ref) [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return evt.wait() [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] result = hub.switch() [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1567.011423] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return self.greenlet.switch() [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self.f(*self.args, **self.kw) [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] raise exceptions.translate_fault(task_info.error) [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.011749] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Faults: ['InvalidArgument'] [ 1567.011749] 
env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] [ 1567.011749] env[68673]: INFO nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Terminating instance [ 1567.013225] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.013446] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.014055] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1567.014241] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1567.014461] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab553372-8c1e-45c8-8329-4f0ee048d925 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.016850] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eceb81d2-755a-4d10-84c3-d62070ee2a6f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.023675] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1567.023881] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eeb7c9e7-0de9-4188-97b7-27eb35505cfa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.025953] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.026139] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 
tempest-MigrationsAdminTest-28536855-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1567.027163] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa864868-3bc1-438a-9aad-0fc8d22593cb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.031745] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 1567.031745] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ade792-dcd0-e78e-75f7-7df237bb87c4" [ 1567.031745] env[68673]: _type = "Task" [ 1567.031745] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.042643] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ade792-dcd0-e78e-75f7-7df237bb87c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.085673] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1567.085839] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1567.086036] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Deleting the datastore file [datastore1] bfb20e23-e3fa-40b8-a114-222c148db6b0 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.086254] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f74a4e0-0ca0-4b61-a8dd-417b16d7f2a9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.092272] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for the task: (returnval){ [ 1567.092272] env[68673]: value = "task-3433567" [ 1567.092272] env[68673]: _type = "Task" [ 1567.092272] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.099735] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Task: {'id': task-3433567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.541385] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1567.541692] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Creating directory with path [datastore1] vmware_temp/cd801ee4-b7e4-4aa6-af0c-78460e005fd9/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.541977] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43ff1a9a-0da0-4c83-91f7-18d7401714d3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.553759] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Created directory with path [datastore1] vmware_temp/cd801ee4-b7e4-4aa6-af0c-78460e005fd9/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.554018] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Fetch image to [datastore1] vmware_temp/cd801ee4-b7e4-4aa6-af0c-78460e005fd9/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1567.554196] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/cd801ee4-b7e4-4aa6-af0c-78460e005fd9/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1567.554944] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57fecb6-7124-43ca-bcd0-c1cfdfb28c64 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.561859] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835f892d-eaee-4553-a181-82c473bf0487 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.572126] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-c9224d8e-ca8c-4613-84f1-ad678dd3f867 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.607355] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390ece0a-eb02-4559-a2a1-429eb67719f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.613042] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.613231] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.618307] env[68673]: DEBUG oslo_vmware.api [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Task: {'id': task-3433567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074154} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.619886] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.620195] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1567.620275] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1567.620635] env[68673]: INFO nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 1567.622741] env[68673]: DEBUG nova.compute.claims [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1567.622922] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.623154] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.625711] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5a3931a-7620-46d4-833f-9022486236d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.652531] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1567.816343] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.817140] env[68673]: ERROR nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. 
[ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1567.817140] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1567.817419] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] yield resources [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.driver.spawn(context, instance, image_meta, [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._fetch_image_if_missing(context, vi) [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image_fetch(context, vi, tmp_image_ds_loc) [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] images.fetch_image( [ 1567.817702] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] metadata = IMAGE_API.get(context, image_ref) [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return session.show(context, image_id, [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] _reraise_translated_image_exception(image_id) [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise new_exc.with_traceback(exc_trace) [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1567.818069] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. 
[ 1567.818358] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1567.818628] env[68673]: INFO nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Terminating instance [ 1567.819090] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.819234] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.821900] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1567.822106] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1567.822350] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c9c83cf-2f9c-4ce3-b5a1-35adb925c539 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.825116] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61097668-8a88-4838-a3ba-abe0d658458a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.832892] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1567.833145] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8f27a1c-f6da-45d6-98aa-54085e8ef1cc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.835331] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.835506] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1567.838583] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad88af07-70f0-433d-8f10-4cdce11f529b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.843584] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for the task: (returnval){ [ 1567.843584] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]528dd2d8-289c-e032-a5f0-f223fd174729" [ 1567.843584] env[68673]: _type = "Task" [ 1567.843584] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.855113] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]528dd2d8-289c-e032-a5f0-f223fd174729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.881028] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264723b6-ebdc-4b80-8c18-d9937d5eb1b3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.887754] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050abf2e-c197-4085-836c-91ba518ef561 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.892543] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1567.892791] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1567.893029] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleting the datastore file [datastore1] 59b4e1de-612b-40f7-bc82-8c5eb3701b7c {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.893254] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-264e255b-3d9c-4cd3-8543-d1aeef90050d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.918902] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69995c2-7f51-41ec-a173-b35b2003dbeb {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.922158] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for the task: (returnval){ [ 1567.922158] env[68673]: value = "task-3433569" [ 1567.922158] env[68673]: _type = "Task" [ 1567.922158] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.928280] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7490316d-2923-4ce5-b939-4b45c0738d50 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.934471] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': task-3433569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.944845] env[68673]: DEBUG nova.compute.provider_tree [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.955461] env[68673]: DEBUG nova.scheduler.client.report [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1567.969644] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.970168] env[68673]: ERROR nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.970168] env[68673]: Faults: ['InvalidArgument'] [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Traceback (most recent call last): [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1567.970168] env[68673]: ERROR 
nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self.driver.spawn(context, instance, image_meta, [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self._fetch_image_if_missing(context, vi) [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] image_cache(vi, tmp_image_ds_loc) [ 1567.970168] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] vm_util.copy_virtual_disk( [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] session._wait_for_task(vmdk_copy_task) [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return self.wait_for_task(task_ref) [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return evt.wait() [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] result = hub.switch() [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] return self.greenlet.switch() [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1567.970580] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] self.f(*self.args, **self.kw) [ 1567.970996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1567.970996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] raise exceptions.translate_fault(task_info.error) [ 1567.970996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.970996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Faults: ['InvalidArgument'] [ 1567.970996] env[68673]: ERROR nova.compute.manager [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] [ 1567.970996] env[68673]: DEBUG nova.compute.utils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1567.972373] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Build of instance bfb20e23-e3fa-40b8-a114-222c148db6b0 was re-scheduled: A specified parameter was not correct: fileType [ 1567.972373] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1567.972731] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1567.972901] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1567.973084] env[68673]: DEBUG nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1567.973245] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1568.353472] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1568.353729] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Creating directory with path [datastore1] vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1568.353959] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed52a868-72f7-45f9-a2fa-c2a4e66e7a14 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.359814] env[68673]: DEBUG nova.network.neutron [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.366256] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Created directory with path [datastore1] vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1568.366256] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Fetch image to [datastore1] vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1568.366256] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] 
vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1568.367224] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1567ec-6560-4a8d-8422-f71f58b400a9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.374673] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf825900-0c2c-4230-bdb8-ae29863cda3a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.377909] env[68673]: INFO nova.compute.manager [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Took 0.40 seconds to deallocate network for instance. [ 1568.387928] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf76365-6337-4b11-820c-0b253c8029d9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.426024] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8393b680-2e2a-47cf-8982-0120a49df667 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.436793] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71694aba-b2d4-4807-9c57-ce28638a6f0a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.438956] env[68673]: DEBUG oslo_vmware.api [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Task: {'id': task-3433569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07325} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.439301] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1568.439538] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1568.439732] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1568.439908] env[68673]: INFO nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1568.442029] env[68673]: DEBUG nova.compute.claims [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1568.442223] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.442441] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.484218] env[68673]: INFO nova.scheduler.client.report [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Deleted allocations for instance bfb20e23-e3fa-40b8-a114-222c148db6b0 [ 1568.507674] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2fc06cd3-f7b6-4bfe-b3dd-8425a27dcf41 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.025s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.507916] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 
tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.497s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.508191] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Acquiring lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.508421] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.508617] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.510600] env[68673]: INFO nova.compute.manager [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Terminating instance [ 1568.512454] env[68673]: DEBUG nova.compute.manager [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Start destroying the instance on the hypervisor. 
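The "waited"/"held" timings in these lock lines come from oslo.concurrency's lockutils, which nova uses to serialize work on named locks such as "compute_resources", the instance UUID, and the instance's "-events" lock. Basic usage looks like this (lock names taken from the log; the bodies are placeholders):

from oslo_concurrency import lockutils

# Context-manager form: only one holder of a given named lock at a time.
with lockutils.lock("compute_resources"):
    pass  # e.g. claim or abort resource usage in the resource tracker

# Decorator form: equivalent serialization for a whole function.
@lockutils.synchronized("bfb20e23-e3fa-40b8-a114-222c148db6b0-events")
def _clear_events():
    pass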
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1568.512617] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1568.513126] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce9c5653-dcce-43aa-82f3-7bcec6d51c62 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.524834] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3415365b-86c8-4320-a4aa-9ae6efb74b15 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.539673] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1568.541460] env[68673]: DEBUG nova.compute.manager [None req-e6111626-7de0-44a3-8fd6-9530ba5bc67b tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: c40c36ba-3be1-4be5-a28e-dd6080a0b7cf] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1568.565102] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfb20e23-e3fa-40b8-a114-222c148db6b0 could not be found. [ 1568.565334] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1568.565514] env[68673]: INFO nova.compute.manager [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1568.565758] env[68673]: DEBUG oslo.service.loopingcall [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.569929] env[68673]: DEBUG nova.compute.manager [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1568.570048] env[68673]: DEBUG nova.network.neutron [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1568.572929] env[68673]: DEBUG nova.compute.manager [None req-e6111626-7de0-44a3-8fd6-9530ba5bc67b tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: c40c36ba-3be1-4be5-a28e-dd6080a0b7cf] Instance disappeared before build. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1568.607244] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e6111626-7de0-44a3-8fd6-9530ba5bc67b tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "c40c36ba-3be1-4be5-a28e-dd6080a0b7cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.835s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.609442] env[68673]: DEBUG nova.network.neutron [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.617109] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1568.621768] env[68673]: INFO nova.compute.manager [-] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] Took 0.05 seconds to deallocate network for instance. [ 1568.638549] env[68673]: DEBUG oslo_vmware.rw_handles [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1568.710439] env[68673]: DEBUG oslo_vmware.rw_handles [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1568.710592] env[68673]: DEBUG oslo_vmware.rw_handles [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
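The rw_handles entries below stream the image bytes over HTTPS straight into the datastore folder URL (21318656 bytes in this run), then close the write handle. A bare-bones sketch of that upload path using requests; this is an approximation, since oslo.vmware's rw_handles also manages the session ticket, cookies, and chunked transfer internally:

import requests

def upload_to_datastore(url, image_iter, size, cookie):
    # Stream the image iterator to the ESX "/folder/..." URL, as in
    # "Creating HTTP connection to write to file with size = ... and URL = ...".
    headers = {
        "Content-Type": "application/octet-stream",
        "Content-Length": str(size),
        "Cookie": cookie,  # assumed vCenter/ESX session cookie or ticket
    }
    resp = requests.put(url, data=image_iter, headers=headers)
    resp.raise_for_status()  # surface HTTP errors from the host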
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1568.733436] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.779127] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c9e7fd79-3934-45a0-8492-3ee7b2febd59 tempest-ListServerFiltersTestJSON-422128185 tempest-ListServerFiltersTestJSON-422128185-project-member] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.271s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.780023] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 356.655s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.780224] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bfb20e23-e3fa-40b8-a114-222c148db6b0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1568.780400] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "bfb20e23-e3fa-40b8-a114-222c148db6b0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.812966] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6e120c-2c06-4f0f-a5fe-8485b4807b3b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.821022] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d465445a-8012-48dc-936b-fca944c999dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.860149] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2dd2ed-5e3c-40db-8ef5-084191204c85 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.866414] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b561168-9d97-4d83-8066-889beff0e44b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.880100] env[68673]: DEBUG nova.compute.provider_tree [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.890042] env[68673]: DEBUG nova.scheduler.client.report [None 
req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1568.904379] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.462s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.905190] env[68673]: ERROR nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1568.905190] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1568.905520] env[68673]: 
ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1568.905520] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.driver.spawn(context, instance, image_meta, [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._fetch_image_if_missing(context, vi) [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image_fetch(context, vi, tmp_image_ds_loc) [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] images.fetch_image( [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", 
line 251, in fetch_image [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] metadata = IMAGE_API.get(context, image_ref) [ 1568.905851] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return session.show(context, image_id, [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] _reraise_translated_image_exception(image_id) [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise new_exc.with_traceback(exc_trace) [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1568.906315] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 
1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1568.906670] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1568.906670] env[68673]: DEBUG nova.compute.utils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1568.907213] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.174s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.908543] env[68673]: INFO nova.compute.claims [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1568.911321] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Build of instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c was re-scheduled: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1568.911714] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1568.911902] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1568.912050] env[68673]: DEBUG nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1568.912216] env[68673]: DEBUG nova.network.neutron [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1569.025748] env[68673]: DEBUG neutronclient.v2_0.client [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68673) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1569.026994] env[68673]: ERROR nova.compute.manager [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1569.026994] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.027341] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.driver.spawn(context, instance, image_meta, [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._fetch_image_if_missing(context, vi) [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image_fetch(context, vi, tmp_image_ds_loc) [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] images.fetch_image( [ 1569.027651] env[68673]: ERROR 
nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] metadata = IMAGE_API.get(context, image_ref) [ 1569.027651] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return session.show(context, image_id, [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] _reraise_translated_image_exception(image_id) [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise new_exc.with_traceback(exc_trace) [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = getattr(controller, method)(*args, **kwargs) [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._get(image_id) [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1569.027987] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] resp, body = self.http_client.get(url, headers=header) [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.request(url, 'GET', **kwargs) [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1569.028398] 
env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self._handle_response(resp) [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exc.from_response(resp, resp.content) [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.ImageNotAuthorized: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.028398] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._build_and_run_instance(context, instance, image, [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exception.RescheduledException( [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.RescheduledException: Build of instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c was re-scheduled: Not authorized for image 7da4e48b-416f-425b-b73b-3305c69c87ef. 
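The chained traceback above bottoms out in the Glance client's HTTP 401, which Nova translates before re-raising: glanceclient raises HTTPUnauthorized, and the _reraise_translated_image_exception frame (nova/image/glance.py:1032 in the trace) converts it to nova.exception.ImageNotAuthorized while preserving the original traceback. A minimal sketch of that translate-and-reraise pattern, using stand-in exception classes rather than Nova's real ones:

    import sys

    class HTTPUnauthorized(Exception):
        """Stand-in for glanceclient.exc.HTTPUnauthorized."""

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""
        def __init__(self, image_id):
            super().__init__("Not authorized for image %s." % image_id)

    def _translate_image_exception(image_id, exc_value):
        # Hypothetical mapping; Nova's real translation covers more
        # client-side error classes than the 401 case shown here.
        if isinstance(exc_value, HTTPUnauthorized):
            return ImageNotAuthorized(image_id)
        return exc_value

    def _reraise_translated_image_exception(image_id):
        # Mirrors the "raise new_exc.with_traceback(exc_trace)" frame above.
        _exc_type, exc_value, exc_trace = sys.exc_info()
        new_exc = _translate_image_exception(image_id, exc_value)
        raise new_exc.with_traceback(exc_trace)

    def show(image_id):
        try:
            # Stands in for the failed GET /v2/images/<id> call.
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except Exception:
            _reraise_translated_image_exception(image_id)

The effect, visible in the log, is that the compute manager only ever handles ImageNotAuthorized, but the printed traceback still walks through the glanceclient frames where the 401 actually occurred.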
[ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1569.028928] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] exception_handler_v20(status_code, error_body) [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise client_exc(message=error_message, [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Neutron server returns request_ids: ['req-a4eac681-0931-4d98-bd76-da95285c7b4b'] [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._deallocate_network(context, instance, requested_networks) [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.network_api.deallocate_for_instance( [ 1569.029321] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] data = neutron.list_ports(**search_opts) [ 
1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.list('ports', self.ports_path, retrieve_all, [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] for r in self._pagination(collection, path, **params): [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] res = self.get(path, params=params) [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.029677] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.retry_request("GET", action, body=body, [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.do_request(method, action, body=body, [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._handle_fault_response(status_code, replybody, resp) [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exception.Unauthorized() [ 1569.030014] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.Unauthorized: Not authorized. [ 1569.030379] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.075777] env[68673]: INFO nova.scheduler.client.report [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Deleted allocations for instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c [ 1569.094436] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7ff9678a-a919-4f24-b3a2-1a77bf34c64b tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 538.833s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.097474] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 356.973s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.097661] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During sync_power_state the instance has a pending task (spawning). Skip. 
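Two details in the lock messages above are worth noting. First, the build lock for 59b4e1de-612b-40f7-bc82-8c5eb3701b7c was held for 538.833s; one plausible reading of this run is that the Keystone token issued at the start of the build expired during that window, which would account for the 401s from both Glance and Neutron once the request finally progressed. Second, the "acquired by ... waited" / "released by ... held" lines are emitted by oslo.concurrency's lock wrapper (the inner frames in lockutils.py above). A minimal usage sketch, assuming oslo.concurrency's public decorator and with an illustrative function name:

    from oslo_concurrency import lockutils

    # Serializes build/terminate/sync paths for a single instance; the
    # wrapper logs how long callers waited for and then held the lock,
    # producing the lockutils.py:402/407/421 lines seen in this log.
    @lockutils.synchronized('59b4e1de-612b-40f7-bc82-8c5eb3701b7c')
    def _locked_do_build_and_run_instance():
        pass  # build work happens while the per-instance lock is held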
[ 1569.097833] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.098602] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 341.969s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.098964] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Acquiring lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.099302] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.099483] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.101297] env[68673]: INFO nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Terminating instance [ 1569.103017] env[68673]: DEBUG nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1569.103221] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1569.103470] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa1a3e44-6e28-411c-be1d-ff4e00734caa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.109052] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1569.115285] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a660a3-3cd9-4473-856a-3034aa709834 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.145413] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59b4e1de-612b-40f7-bc82-8c5eb3701b7c could not be found. [ 1569.145627] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1569.145805] env[68673]: INFO nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1569.146060] env[68673]: DEBUG oslo.service.loopingcall [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.150288] env[68673]: DEBUG nova.compute.manager [-] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1569.150401] env[68673]: DEBUG nova.network.neutron [-] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1569.162746] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.164157] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2915d2-4575-4903-87a5-fd4f7ffba3f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.173570] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331a7e5d-e417-4061-8b05-17f53cf6236a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.206076] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60ebcd5-99c4-41a9-8742-300bfa86c92b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.213631] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a78540-0b2c-479d-bee7-0f13dbe255ce {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.226877] env[68673]: DEBUG nova.compute.provider_tree [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.234887] env[68673]: DEBUG nova.scheduler.client.report [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1569.243955] env[68673]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68673) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1569.244190] env[68673]: ERROR nova.network.neutron [-] Neutron client was not able to 
generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-36d71bf6-aedd-4ffc-9c24-01b98c16264c'] [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1569.244680] env[68673]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 
1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1569.245145] env[68673]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1569.245618] env[68673]: ERROR oslo.service.loopingcall [ 1569.246060] env[68673]: ERROR nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
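Both failure modes in this section funnel through the same proxy, visible as the repeated nova/network/neutron.py:196 "wrapper" frames: every neutronclient call is wrapped, and a neutronclient Unauthorized is translated into nova.exception.Unauthorized (the :204 raise) for a user-token client, or into NeutronAdminCredentialConfigurationInvalid (the :212 raise) when the admin client built from nova.conf cannot authenticate either. A rough sketch of that admin-vs-user distinction, with stand-in exception classes; Nova's real wrapper proxies the whole client object and carries more state:

    import functools

    class NeutronUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class Unauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the Nova exception of the same name."""

    def wrap_neutron_call(func, uses_admin_token):
        # Illustrative only: applied to each client method in turn.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except NeutronUnauthorized:
                if uses_admin_token:
                    # Admin credentials come from nova.conf, so a 401 here
                    # points at deployment configuration rather than a stale
                    # user token -- hence the "please verify Neutron admin
                    # credential located in nova.conf" message above.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper

The distinction matters operationally: the user-token 401 only fails or reschedules one build, while the admin-credential variant ends with the instance in ERROR because Nova cannot even deallocate its ports.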
[ 1569.247977] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.248428] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1569.250704] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.088s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.252054] env[68673]: INFO nova.compute.claims [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.279635] env[68673]: DEBUG nova.compute.utils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.280860] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1569.281069] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1569.287909] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1569.291955] env[68673]: ERROR nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] exception_handler_v20(status_code, error_body) [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise client_exc(message=error_message, [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Neutron server returns request_ids: ['req-36d71bf6-aedd-4ffc-9c24-01b98c16264c'] [ 1569.291955] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] During handling of the above exception, another exception occurred: [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Traceback (most recent call last): [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._delete_instance(context, instance, bdms) [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._shutdown_instance(context, instance, bdms) [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._try_deallocate_network(context, instance, requested_networks) [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] with excutils.save_and_reraise_exception(): [ 1569.292371] env[68673]: ERROR 
nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.292371] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.force_reraise() [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise self.value [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] _deallocate_network_with_retries() [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return evt.wait() [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = hub.switch() [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.greenlet.switch() [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1569.292733] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = func(*self.args, **self.kw) [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] result = f(*args, **kwargs) [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._deallocate_network( [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self.network_api.deallocate_for_instance( [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 
59b4e1de-612b-40f7-bc82-8c5eb3701b7c] data = neutron.list_ports(**search_opts) [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.list('ports', self.ports_path, retrieve_all, [ 1569.293075] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] for r in self._pagination(collection, path, **params): [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] res = self.get(path, params=params) [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.retry_request("GET", action, body=body, [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1569.293429] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] return self.do_request(method, action, body=body, [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] ret = obj(*args, **kwargs) [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] self._handle_fault_response(status_code, replybody, resp) [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1569.293789] env[68673]: ERROR nova.compute.manager [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] [ 1569.317859] env[68673]: DEBUG oslo_concurrency.lockutils [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Lock "59b4e1de-612b-40f7-bc82-8c5eb3701b7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.339047] env[68673]: DEBUG nova.policy [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b0ed7aa0f5c413d9cc32a8bbf4724df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92e8efd351c449e8815c0ec3b6070d20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1569.363125] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1569.376949] env[68673]: INFO nova.compute.manager [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] [instance: 59b4e1de-612b-40f7-bc82-8c5eb3701b7c] Successfully reverted task state from None on failure for instance. [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server [None req-73d13b36-7262-4caf-81ea-71dcd6401a4f tempest-MigrationsAdminTest-28536855 tempest-MigrationsAdminTest-28536855-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-36d71bf6-aedd-4ffc-9c24-01b98c16264c'] [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1569.384330] env[68673]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server raise self.value [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.384828] env[68673]: ERROR oslo_messaging.rpc.server raise self.value [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server raise self.value [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1569.385358] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server raise self.value [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server raise self.value [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1569.385832] env[68673]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1569.386574] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.386574] env[68673]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1569.387062] env[68673]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1569.387536] env[68673]: ERROR oslo_messaging.rpc.server [ 1569.390483] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.390707] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.390890] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.391240] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.391440] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.391595] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.392121] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.392339] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.392520] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Got 1 possible topologies 
{{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.392688] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.392873] env[68673]: DEBUG nova.virt.hardware [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.393934] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78765064-a735-42f5-adf2-2a65673b5215 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.404168] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acfb4ce-42ff-4366-b6d8-3ee99f8d33aa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.499321] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d03c655-5e8a-4689-9f0c-f64daed21576 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.506726] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0014e532-302f-403d-bdd0-348e0b37fe62 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.536884] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4630af-167b-45e3-8ca0-392620a575c1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.543784] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfe59c8-4370-4fdf-9ac3-2392952ce606 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.557560] env[68673]: DEBUG nova.compute.provider_tree [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.566261] env[68673]: DEBUG nova.scheduler.client.report [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1569.581403] env[68673]: 
DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.331s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.581886] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1569.621482] env[68673]: DEBUG nova.compute.utils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.622879] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1569.623170] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1569.637395] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Start building block device mappings for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1569.688371] env[68673]: DEBUG nova.policy [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7bff7d56c3a04a9890a60600923d970a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82ef9a92319d4eb5a1fe96aa42886426', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1569.700195] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Successfully created port: 821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1569.719208] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1569.747196] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.747454] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.747608] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.747785] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1569.748062] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.748139] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.748302] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.748440] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.748606] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.748790] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.748968] env[68673]: DEBUG nova.virt.hardware [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.749849] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a48cf93-2f54-4aa2-be40-55f5cd46a765 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.757994] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5028fa-bdf2-422e-99fd-bfdb9b4e1e28 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.265763] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Successfully created port: 4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1570.443302] env[68673]: DEBUG nova.compute.manager [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Received event 
network-vif-plugged-821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1570.443535] env[68673]: DEBUG oslo_concurrency.lockutils [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] Acquiring lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.443739] env[68673]: DEBUG oslo_concurrency.lockutils [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.443907] env[68673]: DEBUG oslo_concurrency.lockutils [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.444087] env[68673]: DEBUG nova.compute.manager [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] No waiting events found dispatching network-vif-plugged-821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1570.444251] env[68673]: WARNING nova.compute.manager [req-bea48fab-c241-4073-8f07-779c4835fb0d req-be8ad006-df36-4bb2-ab62-7fd65928d16c service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Received unexpected event network-vif-plugged-821980ee-7dae-45a1-bdab-17d8773524b6 for instance with vm_state building and task_state spawning.
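The "No waiting events found" / "Received unexpected event" pair above is typically harmless during spawn: Neutron delivered network-vif-plugged before the driver registered a waiter for it, so the pop under the per-instance "-events" lock comes back empty and the event is discarded with a warning while the instance is still building/spawning. A rough sketch of that pop-under-lock pattern, using threading primitives in place of Nova's eventlet-based ones:

    # Simplified illustration, not Nova's code: external events are matched
    # against waiters registered per instance; popping an event nobody is
    # waiting for yields None and gets logged as unexpected.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}      # instance_uuid -> {event_name: threading.Event}
            self._lock = threading.Lock()  # plays the role of the "<uuid>-events" lock

        def prepare_for_event(self, instance_uuid, event_name):
            with self._lock:
                waiter = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
                return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def dispatch_external_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print("WARNING: Received unexpected event %s" % event_name)
        else:
            waiter.set()           # wakes whoever is blocked in waiter.wait()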
[ 1570.526441] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Successfully updated port: 821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.541724] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.541904] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.542057] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1570.599836] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1570.775980] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Updating instance_info_cache with network_info: [{"id": "821980ee-7dae-45a1-bdab-17d8773524b6", "address": "fa:16:3e:9b:ea:75", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821980ee-7d", "ovs_interfaceid": "821980ee-7dae-45a1-bdab-17d8773524b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.787812] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock 
"refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.788163] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance network_info: |[{"id": "821980ee-7dae-45a1-bdab-17d8773524b6", "address": "fa:16:3e:9b:ea:75", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821980ee-7d", "ovs_interfaceid": "821980ee-7dae-45a1-bdab-17d8773524b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1570.788580] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:ea:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '821980ee-7dae-45a1-bdab-17d8773524b6', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1570.800368] env[68673]: DEBUG oslo.service.loopingcall [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.800847] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1570.804152] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99571694-72d8-45dc-85de-ee58223d7988 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.828198] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1570.828198] env[68673]: value = "task-3433570" [ 1570.828198] env[68673]: _type = "Task" [ 1570.828198] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.836185] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.916779] env[68673]: DEBUG nova.compute.manager [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Received event network-vif-plugged-4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1570.917051] env[68673]: DEBUG oslo_concurrency.lockutils [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] Acquiring lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.917320] env[68673]: DEBUG oslo_concurrency.lockutils [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] Lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.917678] env[68673]: DEBUG oslo_concurrency.lockutils [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] Lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.917678] env[68673]: DEBUG nova.compute.manager [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] No waiting events found dispatching network-vif-plugged-4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1570.917847] env[68673]: WARNING nova.compute.manager [req-ea056f09-edb9-4b36-a164-799cab9412e0 req-d44d11dc-d1da-4b94-a4bd-05061b7763ac service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Received unexpected event network-vif-plugged-4abf4ccb-eb50-4537-97ef-b86130a720dd for instance with vm_state building and task_state spawning.
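Meanwhile task-3433570 (the CreateVM_Task for the first instance) is still being polled: the "Task: {'id': task-3433570, ...} progress is N%" records here and in the records below come from oslo_vmware's wait_for_task/_poll_task loop re-reading the task state until it finishes. A sketch of such a poll loop, with a hypothetical read_task_info callable standing in for the real property reads against vCenter:

    # Illustration only, not oslo.vmware's implementation. read_task_info is a
    # hypothetical callable returning {'state': ..., 'progress': ..., 'result': ...}.
    import time

    def wait_for_task(read_task_info, task_ref, poll_interval=0.5):
        while True:
            info = read_task_info(task_ref)
            if info['state'] == 'success':
                return info.get('result')    # e.g. the created VM reference
            if info['state'] == 'error':
                raise RuntimeError(info.get('error'))
            # 'queued' or 'running': matches the repeated "progress is N%" records
            print("Task: %s progress is %s%%." % (task_ref, info.get('progress', 0)))
            time.sleep(poll_interval)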
[ 1570.968794] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Successfully updated port: 4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.977708] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.977861] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquired lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.978037] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1571.024707] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1571.249655] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Updating instance_info_cache with network_info: [{"id": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "address": "fa:16:3e:bf:af:d6", "network": {"id": "89182668-7e26-41d4-9384-d7fc20098ee9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1332694438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82ef9a92319d4eb5a1fe96aa42886426", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6a31b224-77d7-48a4-af87-312758611b8e", "external-id": "nsx-vlan-transportzone-761", "segmentation_id": 761, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4abf4ccb-eb", "ovs_interfaceid": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.265086] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 
tempest-ServerRescueTestJSON-391359122-project-member] Releasing lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.265722] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance network_info: |[{"id": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "address": "fa:16:3e:bf:af:d6", "network": {"id": "89182668-7e26-41d4-9384-d7fc20098ee9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1332694438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82ef9a92319d4eb5a1fe96aa42886426", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6a31b224-77d7-48a4-af87-312758611b8e", "external-id": "nsx-vlan-transportzone-761", "segmentation_id": 761, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4abf4ccb-eb", "ovs_interfaceid": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1571.266417] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:af:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6a31b224-77d7-48a4-af87-312758611b8e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4abf4ccb-eb50-4537-97ef-b86130a720dd', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.278068] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Creating folder: Project (82ef9a92319d4eb5a1fe96aa42886426). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.278765] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-153f942d-e791-401a-9fcd-aea4ae2196e9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.290793] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Created folder: Project (82ef9a92319d4eb5a1fe96aa42886426) in parent group-v685311. [ 1571.291118] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Creating folder: Instances. Parent ref: group-v685399. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.291467] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce57a416-1b34-4576-90f8-c4c591c6be76 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.301670] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Created folder: Instances in parent group-v685399. [ 1571.302036] env[68673]: DEBUG oslo.service.loopingcall [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.302315] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1571.302673] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3296b389-1cef-4436-b9be-f04da100899a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.323676] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.323676] env[68673]: value = "task-3433573" [ 1571.323676] env[68673]: _type = "Task" [ 1571.323676] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.332267] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433573, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.339389] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.833433] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433573, 'name': CreateVM_Task, 'duration_secs': 0.328356} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.836243] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1571.836893] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.837076] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.837396] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.837925] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a5b6792-1b14-46c2-830f-f305be56f7bb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.842653] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.845363] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for the task: (returnval){ [ 1571.845363] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a734ea-29ee-44d5-2771-e9acb16c39a7" [ 1571.845363] env[68673]: _type = "Task" [ 1571.845363] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.852223] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a734ea-29ee-44d5-2771-e9acb16c39a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.344361] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.355233] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.355728] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.356062] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.472873] env[68673]: DEBUG nova.compute.manager [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Received event network-changed-821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1572.473074] env[68673]: DEBUG nova.compute.manager [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Refreshing instance network info cache due to event network-changed-821980ee-7dae-45a1-bdab-17d8773524b6. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1572.473287] env[68673]: DEBUG oslo_concurrency.lockutils [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] Acquiring lock "refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.473429] env[68673]: DEBUG oslo_concurrency.lockutils [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] Acquired lock "refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.473590] env[68673]: DEBUG nova.network.neutron [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Refreshing network info cache for port 821980ee-7dae-45a1-bdab-17d8773524b6 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1572.831068] env[68673]: DEBUG nova.network.neutron [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Updated VIF entry in instance network info cache for port 821980ee-7dae-45a1-bdab-17d8773524b6. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1572.831440] env[68673]: DEBUG nova.network.neutron [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Updating instance_info_cache with network_info: [{"id": "821980ee-7dae-45a1-bdab-17d8773524b6", "address": "fa:16:3e:9b:ea:75", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap821980ee-7d", "ovs_interfaceid": "821980ee-7dae-45a1-bdab-17d8773524b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.842607] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.843569] env[68673]: DEBUG oslo_concurrency.lockutils [req-b8664bc3-cfca-4e3d-9652-3f90534deb47 req-8a806989-52ba-4821-89b9-3653ba31b91b service nova] Releasing lock "refresh_cache-c6f7698c-3a1d-47e7-aeac-fd0e50376a39" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.942643] env[68673]: DEBUG nova.compute.manager [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Received event network-changed-4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1572.942848] env[68673]: DEBUG nova.compute.manager [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Refreshing instance network info cache due to event network-changed-4abf4ccb-eb50-4537-97ef-b86130a720dd. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1572.943075] env[68673]: DEBUG oslo_concurrency.lockutils [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] Acquiring lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.943238] env[68673]: DEBUG oslo_concurrency.lockutils [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] Acquired lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.943412] env[68673]: DEBUG nova.network.neutron [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Refreshing network info cache for port 4abf4ccb-eb50-4537-97ef-b86130a720dd {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1573.220253] env[68673]: DEBUG nova.network.neutron [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Updated VIF entry in instance network info cache for port 4abf4ccb-eb50-4537-97ef-b86130a720dd. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1573.220605] env[68673]: DEBUG nova.network.neutron [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Updating instance_info_cache with network_info: [{"id": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "address": "fa:16:3e:bf:af:d6", "network": {"id": "89182668-7e26-41d4-9384-d7fc20098ee9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1332694438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82ef9a92319d4eb5a1fe96aa42886426", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6a31b224-77d7-48a4-af87-312758611b8e", "external-id": "nsx-vlan-transportzone-761", "segmentation_id": 761, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4abf4ccb-eb", "ovs_interfaceid": "4abf4ccb-eb50-4537-97ef-b86130a720dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.229677] env[68673]: DEBUG oslo_concurrency.lockutils [req-acc83e99-6e0e-4b3f-9eae-e3563cc31cb2 req-933c0736-3149-4647-a10c-826944bceb23 service nova] Releasing lock "refresh_cache-601dfed1-fb7b-413a-836d-7fda61314c73" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.344173] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433570, 'name': CreateVM_Task, 'duration_secs': 2.128955} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.344404] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1573.345093] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.345300] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.345635] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.345879] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa41aa9-1877-403d-b9a2-74c63872471a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.349982] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1573.349982] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a6e90e-2800-2e3b-a3f6-d190ac30dc4b" [ 1573.349982] env[68673]: _type = "Task" [ 1573.349982] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.357169] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52a6e90e-2800-2e3b-a3f6-d190ac30dc4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.862912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.862912] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1573.862912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.059401] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.059724] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.784376] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.784674] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.784768] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
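The "Running periodic task ComputeManager._*" lines above come from oslo.service's periodic_task framework, and the "skipping..." line is the guard on reclaim_instance_interval. A minimal standalone sketch of that pattern, assuming only oslo.config and oslo.service; the option registration and spacing here are illustrative, not Nova's real configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    # Illustrative registration; Nova defines reclaim_instance_interval itself.
    cfg.CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])


    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _reclaim_queued_deletes(self, context):
            # Same guard as the log line above.
            if cfg.CONF.reclaim_instance_interval <= 0:
                print("CONF.reclaim_instance_interval <= 0, skipping...")


    Manager().run_periodic_tasks(context=None)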
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1608.779985] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1609.783067] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1609.783332] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1609.783401] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1609.806036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806284] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806284] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806284] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806284] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806397] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1609.806483] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1612.784609] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.784386] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.784637] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.783515] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.799321] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.799665] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.799665] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.799775] env[68673]: 
DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1614.800884] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e5d4d0-fa31-4244-904e-892c96ea3991 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.809797] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428f0c06-6032-4486-b782-6b9d3f598d01 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.823605] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f64365-6222-4405-aab7-f970db58bc9a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.829727] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255948c9-d20d-468a-bc0e-9136c00fdd53 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.859998] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180899MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1614.860164] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.860356] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.930508] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.930666] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 72c1282a-8a71-4952-a02a-b6dd45269488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.930792] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.930911] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931045] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931172] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931289] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931402] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931515] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.931625] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.945390] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.956321] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.966623] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.976756] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1d23d68-3a7f-4f5d-a13f-22f77917dd6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.986398] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
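The totals in the final resource view just below follow directly from these allocations: the ten instances listed as actively managed each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the inventory reserves 512 MB of host memory; the five instances flagged as scheduled-but-not-started hold placement allocations but do not yet appear in the host usage numbers. A quick arithmetic check:

    instances = 10               # actively managed instances listed above
    print(512 + instances * 128) # -> 1792  (used_ram MB, incl. 512 MB reserved)
    print(instances * 1)         # -> 10    (used_disk GB)
    print(instances * 1)         # -> 10    (used_vcpus / total allocated vcpus)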
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.986626] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1614.986769] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1615.146496] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9cc532-d1ac-432c-a5cd-c9f61aafc4f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.154088] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9551cb23-e2f4-446b-82e6-dec9232704d7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.183643] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab275a3-2507-4614-b6ed-316fd5fceb47 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.190876] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508ac6bd-9f5f-444b-974d-1febcef8d9c6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.205620] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.213431] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1615.226545] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1615.226723] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.366s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.584489] env[68673]: WARNING oslo_vmware.rw_handles [None 
req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1615.584489] env[68673]: ERROR oslo_vmware.rw_handles [ 1615.585055] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1615.586823] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1615.587086] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Copying Virtual Disk [datastore1] vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/5de4e59f-69b0-45aa-a97e-9450bec5b953/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1615.587367] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95d52a28-a65a-4202-a82d-14d3f2f8e69e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.595996] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for the task: (returnval){ [ 1615.595996] env[68673]: value = "task-3433574" [ 1615.595996] env[68673]: _type = "Task" [ 1615.595996] env[68673]: } to complete. 
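The rw_handles WARNING and traceback above show the ESX side dropping the image-transfer connection before sending an HTTP status line. That failure mode is reproducible with the standard library alone; a self-contained sketch, with a local socket standing in for the ESX datastore endpoint:

    import http.client
    import socket
    import threading


    def rude_server(sock):
        conn, _ = sock.accept()
        conn.recv(65536)  # consume the request...
        conn.close()      # ...then close without writing any HTTP response


    srv = socket.socket()
    srv.bind(("127.0.0.1", 0))
    srv.listen(1)
    threading.Thread(target=rude_server, args=(srv,), daemon=True).start()

    client = http.client.HTTPConnection("127.0.0.1", srv.getsockname()[1])
    client.request("GET", "/")
    try:
        client.getresponse()  # raises, as in the traceback above
    except http.client.RemoteDisconnected as exc:
        print("caught:", exc)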
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.603898] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Task: {'id': task-3433574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.107831] env[68673]: DEBUG oslo_vmware.exceptions [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1616.107831] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.107831] env[68673]: ERROR nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1616.107831] env[68673]: Faults: ['InvalidArgument'] [ 1616.107831] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Traceback (most recent call last): [ 1616.107831] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1616.107831] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] yield resources [ 1616.107831] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1616.107831] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self.driver.spawn(context, instance, image_meta, [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self._fetch_image_if_missing(context, vi) [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] image_cache(vi, tmp_image_ds_loc) [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] vm_util.copy_virtual_disk( [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] session._wait_for_task(vmdk_copy_task) [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return self.wait_for_task(task_ref) [ 1616.108525] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return evt.wait() [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] result = hub.switch() [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return self.greenlet.switch() [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self.f(*self.args, **self.kw) [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] raise exceptions.translate_fault(task_info.error) [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Faults: ['InvalidArgument'] [ 1616.108921] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] [ 1616.109297] env[68673]: INFO nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Terminating instance [ 1616.109297] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.109297] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.109297] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90840471-3539-4d54-95c6-c81782fa873f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.111178] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.111337] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquired lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.111501] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1616.118165] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.118299] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1616.119447] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e0a2416-2871-4a57-83db-64cf0b3008db {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.126449] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1616.126449] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ad73a5-bbc8-623a-5807-30bbc8b56589" [ 1616.126449] env[68673]: _type = "Task" [ 1616.126449] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.134014] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52ad73a5-bbc8-623a-5807-30bbc8b56589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.176019] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1616.279006] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.287643] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Releasing lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.288036] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Start destroying the instance on the hypervisor. 
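The Acquiring/Acquired/Releasing lock "refresh_cache-..." triplets throughout this section are oslo.concurrency's named in-process locks. A minimal sketch of the same pattern, with the lock name copied from the log; Nova reaches this through its own wrappers, which are not reproduced here:

    from oslo_concurrency import lockutils

    # Emits similar Acquiring/Acquired/Releasing DEBUG lines when
    # oslo_concurrency.lockutils logging is enabled.
    with lockutils.lock("refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488"):
        pass  # critical section: rebuild this instance's network info cache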
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1616.288228] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1616.289278] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4add7f15-aeae-4cb2-9433-32d2545c14ad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.297150] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1616.297375] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c89ba8ec-fbfe-4065-8391-572e630aaaf2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.322443] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1616.322652] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1616.322827] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Deleting the datastore file [datastore1] 72c1282a-8a71-4952-a02a-b6dd45269488 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.323342] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e1c7a4e-2220-4360-9d3e-e2ea0f57e5aa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.329007] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for the task: (returnval){ [ 1616.329007] env[68673]: value = "task-3433576" [ 1616.329007] env[68673]: _type = "Task" [ 1616.329007] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.336312] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Task: {'id': task-3433576, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.636585] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1616.636812] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.637065] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5a514d4-0773-4a57-a0d0-e1ded9d59e69 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.647939] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.648210] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Fetch image to [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1616.648394] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1616.649187] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140deff7-087d-4360-8d6c-4bfb269b38b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.655734] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857ddd13-f0c7-4ccb-b46c-990da99c8f83 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.664552] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32682bd0-685b-4766-8648-5615ac7c4849 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.695946] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cca4476-22dd-417c-843a-cdabc09f1856 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.701315] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9ce95ca7-d646-4f03-9536-2b1d91d4d1fa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.722216] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1616.769071] env[68673]: DEBUG oslo_vmware.rw_handles [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1616.829497] env[68673]: DEBUG oslo_vmware.rw_handles [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1616.829701] env[68673]: DEBUG oslo_vmware.rw_handles [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1616.838562] env[68673]: DEBUG oslo_vmware.api [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Task: {'id': task-3433576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032135} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.840418] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.840418] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1616.840418] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1616.840418] env[68673]: INFO nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1616.840418] env[68673]: DEBUG oslo.service.loopingcall [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.840637] env[68673]: DEBUG nova.compute.manager [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network deallocation for instance since networking was not requested. 
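The "Waiting for function ..._deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery. A minimal sketch of that wait pattern; the polled function below is illustrative, not Nova's retry helper:

    from oslo_service import loopingcall

    attempts = []


    def poll():
        attempts.append(1)
        if len(attempts) >= 3:
            # Stops the loop and hands the value back to wait().
            raise loopingcall.LoopingCallDone(retvalue="deallocated")


    timer = loopingcall.FixedIntervalLoopingCall(poll)
    print(timer.start(interval=0.1).wait())  # -> deallocated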
{{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1616.841983] env[68673]: DEBUG nova.compute.claims [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1616.842863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.842863] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.055012] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0602d5b5-930e-4006-9487-83127f80ca08 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.062872] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21d75f6-c500-44bb-a755-11d3a531b8e5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.091816] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219839af-796c-4c9a-9f4a-535d8a1664c5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.098816] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d68c73-8d69-499f-8713-a0170103272c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.112515] env[68673]: DEBUG nova.compute.provider_tree [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.120723] env[68673]: DEBUG nova.scheduler.client.report [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1617.133585] env[68673]: DEBUG oslo_concurrency.lockutils [None 
req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.291s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1617.134100] env[68673]: ERROR nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1617.134100] env[68673]: Faults: ['InvalidArgument']
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Traceback (most recent call last):
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self.driver.spawn(context, instance, image_meta,
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self._fetch_image_if_missing(context, vi)
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] image_cache(vi, tmp_image_ds_loc)
[ 1617.134100] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] vm_util.copy_virtual_disk(
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] session._wait_for_task(vmdk_copy_task)
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return self.wait_for_task(task_ref)
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return evt.wait()
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] result = hub.switch()
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] return self.greenlet.switch()
[ 1617.134407] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] self.f(*self.args, **self.kw)
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] raise exceptions.translate_fault(task_info.error)
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Faults: ['InvalidArgument']
[ 1617.134719] env[68673]: ERROR nova.compute.manager [instance: 72c1282a-8a71-4952-a02a-b6dd45269488]
[ 1617.134834] env[68673]: DEBUG nova.compute.utils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1617.136160] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Build of instance 72c1282a-8a71-4952-a02a-b6dd45269488 was re-scheduled: A specified parameter was not correct: fileType
[ 1617.136160] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1617.136528] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1617.136745] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1617.136890] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607
tempest-ServersAaction247Test-596230607-project-member] Acquired lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.137060] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1617.159587] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1617.217553] env[68673]: DEBUG nova.network.neutron [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.226244] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Releasing lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.226523] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1617.226734] env[68673]: DEBUG nova.compute.manager [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1617.309793] env[68673]: INFO nova.scheduler.client.report [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Deleted allocations for instance 72c1282a-8a71-4952-a02a-b6dd45269488 [ 1617.330052] env[68673]: DEBUG oslo_concurrency.lockutils [None req-42df88c6-291b-454c-a552-d7c2b9d7ff3f tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.270s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.331219] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.474s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.331439] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "72c1282a-8a71-4952-a02a-b6dd45269488-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.331643] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.331802] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.333937] env[68673]: INFO nova.compute.manager [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Terminating instance [ 1617.335612] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquiring lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.335612] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Acquired lock 
"refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.335841] env[68673]: DEBUG nova.network.neutron [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1617.344836] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1617.362372] env[68673]: DEBUG nova.network.neutron [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1617.397711] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.397952] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.399404] env[68673]: INFO nova.compute.claims [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.429853] env[68673]: DEBUG nova.network.neutron [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.440957] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Releasing lock "refresh_cache-72c1282a-8a71-4952-a02a-b6dd45269488" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.441328] env[68673]: DEBUG nova.compute.manager [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1617.441523] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1617.442315] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f850d17-087f-4a1f-9321-62aedcc92d89 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.453761] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867fa3e7-57e2-459d-b254-be60c156e4bc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.484188] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72c1282a-8a71-4952-a02a-b6dd45269488 could not be found. [ 1617.484389] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1617.484567] env[68673]: INFO nova.compute.manager [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1617.484806] env[68673]: DEBUG oslo.service.loopingcall [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.487166] env[68673]: DEBUG nova.compute.manager [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1617.487266] env[68673]: DEBUG nova.network.neutron [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1617.505613] env[68673]: DEBUG nova.network.neutron [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1617.513666] env[68673]: DEBUG nova.network.neutron [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.521956] env[68673]: INFO nova.compute.manager [-] [instance: 72c1282a-8a71-4952-a02a-b6dd45269488] Took 0.03 seconds to deallocate network for instance. 
[ 1617.608183] env[68673]: DEBUG oslo_concurrency.lockutils [None req-133fd5c9-bddb-4b5f-bf58-aa374d0d9f46 tempest-ServersAaction247Test-596230607 tempest-ServersAaction247Test-596230607-project-member] Lock "72c1282a-8a71-4952-a02a-b6dd45269488" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.277s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.634565] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf86cb8-4860-4aed-9e24-f3dee43057ea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.642754] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38adf42-7f9f-4653-a90a-3856429dd058 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.672605] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11b14de-2dc0-46ff-8c93-c9bc5e07ac1d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.679615] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8545ebcb-c9df-4e70-a2af-c30023cd30d9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.692407] env[68673]: DEBUG nova.compute.provider_tree [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.700911] env[68673]: DEBUG nova.scheduler.client.report [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1617.713686] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.714154] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1617.751482] env[68673]: DEBUG nova.compute.utils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1617.752693] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1617.752867] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1617.762024] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1617.820395] env[68673]: DEBUG nova.policy [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e90a825630094004a4f8aa69b766507e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e386fa1e07744a79859b6f204a4af731', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1617.823370] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1617.849186] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1617.849438] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1617.849596] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.849772] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1617.849915] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.850072] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1617.850281] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1617.850474] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1617.850653] env[68673]: DEBUG 
nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1617.850817] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1617.850986] env[68673]: DEBUG nova.virt.hardware [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1617.851846] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0c776e-3df2-4e6a-860b-0154c9d6d8dd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.859752] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c5a197-0a08-4103-8e8f-05a143a9d5b7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.225439] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Successfully created port: a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1618.769554] env[68673]: DEBUG nova.compute.manager [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Received event network-vif-plugged-a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1618.769782] env[68673]: DEBUG oslo_concurrency.lockutils [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] Acquiring lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.769991] env[68673]: DEBUG oslo_concurrency.lockutils [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.770177] env[68673]: DEBUG oslo_concurrency.lockutils [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.770345] env[68673]: DEBUG 
nova.compute.manager [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] No waiting events found dispatching network-vif-plugged-a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1618.770507] env[68673]: WARNING nova.compute.manager [req-92b3065e-30c1-472a-acf8-ac0655a9fdfb req-d3dc5cac-7c21-4089-b687-9ab76ffbca66 service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Received unexpected event network-vif-plugged-a8a80db8-82e5-48b0-9cd8-ff3b3f916235 for instance with vm_state building and task_state spawning. [ 1618.897183] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Successfully updated port: a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.909626] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.909799] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquired lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.909951] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1618.948625] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1619.134641] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Updating instance_info_cache with network_info: [{"id": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "address": "fa:16:3e:fb:3f:7e", "network": {"id": "918c5bb6-6640-4bd7-adf3-786254400444", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-821297518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e386fa1e07744a79859b6f204a4af731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a80db8-82", "ovs_interfaceid": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.146107] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Releasing lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.146410] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance network_info: |[{"id": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "address": "fa:16:3e:fb:3f:7e", "network": {"id": "918c5bb6-6640-4bd7-adf3-786254400444", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-821297518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e386fa1e07744a79859b6f204a4af731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a80db8-82", "ovs_interfaceid": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1619.146800] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:3f:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8a80db8-82e5-48b0-9cd8-ff3b3f916235', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.154348] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Creating folder: Project (e386fa1e07744a79859b6f204a4af731). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1619.154884] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65ae056b-dc31-4cde-a5df-d0f39e9bb2d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.165533] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Created folder: Project (e386fa1e07744a79859b6f204a4af731) in parent group-v685311. [ 1619.167893] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Creating folder: Instances. Parent ref: group-v685402. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1619.167893] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52df3e99-0ac8-425f-8c3a-2b01a090729c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.174737] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Created folder: Instances in parent group-v685402. [ 1619.174951] env[68673]: DEBUG oslo.service.loopingcall [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1619.175149] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1619.175346] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8275d4e-6b10-4d86-895b-cabe945857c7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.193852] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1619.193852] env[68673]: value = "task-3433579"
[ 1619.193852] env[68673]: _type = "Task"
[ 1619.193852] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1619.200997] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433579, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1619.227042] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1619.328172] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1619.705063] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433579, 'name': CreateVM_Task, 'duration_secs': 0.285655} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1619.705063] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1619.705231] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1619.705380] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1619.705688] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1619.705926] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-966e6fde-7d1a-479f-b587-48c6bd7280df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.710013] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for the task: (returnval){
[ 1619.710013] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522007cb-226f-e8ec-dc20-aaf1ee0f54d0"
[ 1619.710013] env[68673]: _type = "Task"
[ 1619.710013] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1619.717014] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]522007cb-226f-e8ec-dc20-aaf1ee0f54d0, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.219554] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.219820] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.220042] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.911419] env[68673]: DEBUG nova.compute.manager [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Received event network-changed-a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1620.911653] env[68673]: DEBUG nova.compute.manager [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Refreshing instance network info cache due to event network-changed-a8a80db8-82e5-48b0-9cd8-ff3b3f916235. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1620.911793] env[68673]: DEBUG oslo_concurrency.lockutils [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] Acquiring lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.911933] env[68673]: DEBUG oslo_concurrency.lockutils [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] Acquired lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.912103] env[68673]: DEBUG nova.network.neutron [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Refreshing network info cache for port a8a80db8-82e5-48b0-9cd8-ff3b3f916235 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1621.161954] env[68673]: DEBUG nova.network.neutron [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Updated VIF entry in instance network info cache for port a8a80db8-82e5-48b0-9cd8-ff3b3f916235. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1621.162324] env[68673]: DEBUG nova.network.neutron [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Updating instance_info_cache with network_info: [{"id": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "address": "fa:16:3e:fb:3f:7e", "network": {"id": "918c5bb6-6640-4bd7-adf3-786254400444", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-821297518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e386fa1e07744a79859b6f204a4af731", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a80db8-82", "ovs_interfaceid": "a8a80db8-82e5-48b0-9cd8-ff3b3f916235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.171820] env[68673]: DEBUG oslo_concurrency.lockutils [req-98e8c467-8097-4910-ad99-ef4043819795 req-c5f787c8-653f-4f62-8a3a-eb722e3046ca service nova] Releasing lock "refresh_cache-2a8badf2-c080-46dc-be89-4c73bb88cc01" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.288475] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "601dfed1-fb7b-413a-836d-7fda61314c73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.229672] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.807385] env[68673]: WARNING oslo_vmware.rw_handles [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1662.807385] env[68673]: ERROR oslo_vmware.rw_handles [ 1662.808026] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1662.810071] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1662.810323] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Copying Virtual Disk [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/3741fb56-4949-4f10-a192-8cadcd7d0c5e/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1662.810614] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ba50a0a-da97-41b1-8504-c3146e38cca2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.819585] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1662.819585] env[68673]: value = "task-3433580" [ 1662.819585] env[68673]: _type = "Task" [ 1662.819585] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.827392] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.330043] env[68673]: DEBUG oslo_vmware.exceptions [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1663.330341] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.330895] env[68673]: ERROR nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1663.330895] env[68673]: Faults: ['InvalidArgument'] [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Traceback (most recent call last): [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] yield resources [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self.driver.spawn(context, instance, image_meta, [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self._fetch_image_if_missing(context, vi) [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1663.330895] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] image_cache(vi, tmp_image_ds_loc) [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] vm_util.copy_virtual_disk( [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] session._wait_for_task(vmdk_copy_task) [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return self.wait_for_task(task_ref) [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return evt.wait() [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] result = hub.switch() [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return self.greenlet.switch() [ 1663.331394] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self.f(*self.args, **self.kw) [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] raise exceptions.translate_fault(task_info.error) [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Faults: ['InvalidArgument'] [ 1663.331791] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] [ 1663.331791] env[68673]: INFO nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Terminating instance [ 1663.332875] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.333108] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.333345] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d000b06-18e8-4bdb-b755-d1fcdac74743 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.336449] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1663.336643] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1663.337356] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbee0a98-4ab0-40d9-8fff-d85e58923715 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.344146] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1663.344358] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7aa22580-a4c1-4a4f-8f3a-077616369ece {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.346375] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.346545] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1663.347461] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-464d7e2f-e09d-460b-a3df-a511970ede1f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.352930] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for the task: (returnval){ [ 1663.352930] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b1adb0-e77d-d2d0-c68c-d66796c41e7e" [ 1663.352930] env[68673]: _type = "Task" [ 1663.352930] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.359910] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b1adb0-e77d-d2d0-c68c-d66796c41e7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.417476] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1663.417673] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1663.417842] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleting the datastore file [datastore1] 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.418122] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c9953ef-a437-4147-b50f-79a20defc640 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.423646] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1663.423646] env[68673]: value = "task-3433582" [ 1663.423646] env[68673]: _type = "Task" [ 1663.423646] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.431115] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433582, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.863646] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1663.864149] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Creating directory with path [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.864149] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c91df12-f851-49c8-ab50-60e959a77454 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.875601] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Created directory with path [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.875779] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Fetch image to [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1663.875953] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1663.876688] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c50787-e015-4b7c-88cb-628e4ea02fcc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.882993] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22901ae-7b56-4582-9c32-3f57d035072c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.891707] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049bfa1a-8bb1-4c5a-9081-084715016b73 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.921252] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fdbb09d1-0886-4958-b84c-bca5d0d37fcf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.929035] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ee9635b-660d-4630-9aab-c15bcc170f5f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.933081] env[68673]: DEBUG oslo_vmware.api [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085125} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.933574] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1663.933755] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1663.933924] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1663.934140] env[68673]: INFO nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Took 0.60 seconds to destroy the instance on the hypervisor. 
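
Editor's note: the traceback above ends inside oslo.vmware's task polling. copy_virtual_disk submits a disk-copy task to vCenter, wait_for_task polls it from a looping call, and when the task reaches the error state the server-side fault is re-raised client-side via exceptions.translate_fault, which is how "A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" surfaces in Nova's log. A minimal, self-contained sketch of that loop follows; it is illustrative only, not the oslo.vmware implementation, and assumes a poll_task_info() callable returning an object with state, faults and error_message attributes (those names are assumptions for the sketch):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException (sketch only)."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        # Poll until the task leaves the running state, then either return
        # the task info or translate the server-side fault into an exception;
        # the fault names (here ['InvalidArgument']) ride along on the raise.
        while True:
            info = poll_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise VimFaultException(info.faults, info.error_message)
            time.sleep(interval)
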
[ 1663.936184] env[68673]: DEBUG nova.compute.claims [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1663.936353] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.936555] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.957147] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1664.094805] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1664.158500] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1664.158762] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1664.209216] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec758da-85cd-496d-a62f-1cee969b1769 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.217805] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80978a1-feab-40f0-8c28-fb9a8066c51f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.246850] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0cb5c5-dfe1-42bc-b97e-3545380ed712 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.253596] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d99a291-509e-48a0-80ee-4839d0200c09 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.266601] env[68673]: DEBUG nova.compute.provider_tree [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1664.275884] env[68673]: DEBUG nova.scheduler.client.report [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1664.290629] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.354s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.291174] env[68673]: ERROR nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1664.291174] env[68673]: Faults: ['InvalidArgument'] [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Traceback (most recent call last): [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] 
self.driver.spawn(context, instance, image_meta, [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self._fetch_image_if_missing(context, vi) [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] image_cache(vi, tmp_image_ds_loc) [ 1664.291174] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] vm_util.copy_virtual_disk( [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] session._wait_for_task(vmdk_copy_task) [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return self.wait_for_task(task_ref) [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return evt.wait() [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] result = hub.switch() [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] return self.greenlet.switch() [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1664.291501] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] self.f(*self.args, **self.kw) [ 1664.291910] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1664.291910] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] raise exceptions.translate_fault(task_info.error) [ 1664.291910] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1664.291910] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Faults: ['InvalidArgument'] [ 1664.291910] env[68673]: ERROR nova.compute.manager [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] [ 1664.291910] env[68673]: DEBUG nova.compute.utils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1664.293289] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Build of instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 was re-scheduled: A specified parameter was not correct: fileType [ 1664.293289] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1664.293657] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1664.293829] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1664.294040] env[68673]: DEBUG nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1664.294220] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1664.591717] env[68673]: DEBUG nova.network.neutron [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.605359] env[68673]: INFO nova.compute.manager [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Took 0.31 seconds to deallocate network for instance. 
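
Editor's note: once the fault propagates out of spawn, the manager walks the cleanup path recorded above: the resource claim is aborted under the "compute_resources" lock, the (empty) network info is deallocated, and the build is marked re-scheduled rather than retried locally. A rough, self-contained sketch of that control flow, assuming duck-typed claim and network objects with the method names below (placeholders for illustration, not Nova's real signatures):

    class BuildFailure(Exception):
        """Signals that the build should be handed back for rescheduling."""

    def build_instance(spawn, claim, network):
        claim.acquire()              # Lock "compute_resources" acquired
        try:
            network.allocate()
            spawn()                  # may raise, e.g. the VimFaultException above
        except Exception as exc:
            network.deallocate()     # "Deallocating network for instance"
            claim.abort()            # "Aborting claim"
            raise BuildFailure('re-scheduled: %s' % exc) from exc
        finally:
            claim.release()          # Lock "compute_resources" "released"
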
[ 1664.705041] env[68673]: INFO nova.scheduler.client.report [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted allocations for instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 [ 1664.725967] env[68673]: DEBUG oslo_concurrency.lockutils [None req-13b28842-2725-4de1-a106-0dec618b1348 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 632.462s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.727268] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 436.722s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.727595] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.727865] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.728144] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.730711] env[68673]: INFO nova.compute.manager [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Terminating instance [ 1664.732751] env[68673]: DEBUG nova.compute.manager [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1664.733304] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1664.733551] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9348f771-dae7-4bbd-99fc-74cd798cfba6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.739550] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1664.745824] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a16c02-190f-420f-b759-807d7a61c444 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.776762] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3 could not be found. [ 1664.776905] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1664.777092] env[68673]: INFO nova.compute.manager [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1664.777339] env[68673]: DEBUG oslo.service.loopingcall [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.777575] env[68673]: DEBUG nova.compute.manager [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1664.777668] env[68673]: DEBUG nova.network.neutron [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1664.796793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.797038] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.798486] env[68673]: INFO nova.compute.claims [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1664.803251] env[68673]: DEBUG nova.network.neutron [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.812022] env[68673]: INFO nova.compute.manager [-] [instance: 3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3] Took 0.03 seconds to deallocate network for instance. 
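
Editor's note: the interleaved "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.179s" lines above and below are produced by oslo.concurrency's named locks, which time how long each caller waited for and then held a lock. Typical usage looks like the following short sketch (the lock names are taken from this log; the function body is a placeholder):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named lock held; lockutils logs the wait and hold
        # durations ("waited 0.000s", "held 0.179s") seen throughout this log.
        pass

    # The same locks are also taken as context managers, e.g. around the
    # per-instance network info cache refresh:
    with lockutils.lock('refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76'):
        pass
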
[ 1664.906336] env[68673]: DEBUG oslo_concurrency.lockutils [None req-3ff9c2d1-ad3b-4992-9a5f-3eb791e9f8e0 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "3915f9a4-26e7-4d7f-8fcf-5b4de65a2ce3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.179s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.999757] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6684f94-6916-4bd9-acb5-b071963b2cfb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.007372] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb9d25a-b4da-46de-b9fe-ca80ed32cba4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.036990] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00de3e14-4ab2-4c02-b6cb-bc761e27d150 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.043303] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a7a0dc-4ad4-40dd-895e-dcd6c60ed092 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.056118] env[68673]: DEBUG nova.compute.provider_tree [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1665.064626] env[68673]: DEBUG nova.scheduler.client.report [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1665.079499] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.080117] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1665.114426] env[68673]: DEBUG nova.compute.utils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1665.115517] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1665.115702] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1665.124612] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1665.196053] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1665.207683] env[68673]: DEBUG nova.policy [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0b29afc3a154e19831a76ca2fe1aeef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60eb74f0bbe44955a92f1ee0b6099ec7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1665.222623] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1665.222862] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1665.223032] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1665.223236] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1665.223405] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1665.223554] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1665.223761] env[68673]: DEBUG nova.virt.hardware [None 
req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1665.223921] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1665.224273] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1665.224470] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1665.224650] env[68673]: DEBUG nova.virt.hardware [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1665.225510] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451af1b9-aa80-4b5e-8caf-bb9899dec591 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.233455] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609df1d2-4b18-46fc-954f-40198dc395bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.607398] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Successfully created port: 6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1666.213461] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Successfully updated port: 6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.226406] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.226579] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 
tempest-ImagesNegativeTestJSON-681307171-project-member] Acquired lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.226729] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1666.445990] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1666.622810] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Updating instance_info_cache with network_info: [{"id": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "address": "fa:16:3e:87:aa:9c", "network": {"id": "ea0d99db-659f-41e4-a84a-ff4bc5c94fc0", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1232755130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60eb74f0bbe44955a92f1ee0b6099ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2d131b-7d", "ovs_interfaceid": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.630200] env[68673]: DEBUG nova.compute.manager [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Received event network-vif-plugged-6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1666.630423] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Acquiring lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.630687] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.630829] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.630950] env[68673]: DEBUG nova.compute.manager [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] No waiting events found dispatching network-vif-plugged-6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1666.631136] env[68673]: WARNING nova.compute.manager [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Received unexpected event network-vif-plugged-6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 for instance with vm_state building and task_state spawning. [ 1666.631300] env[68673]: DEBUG nova.compute.manager [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Received event network-changed-6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1666.631566] env[68673]: DEBUG nova.compute.manager [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Refreshing instance network info cache due to event network-changed-6c2d131b-7def-40d2-9f0a-fa89d8eba8a8. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1666.631645] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Acquiring lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.635728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Releasing lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.635987] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance network_info: |[{"id": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "address": "fa:16:3e:87:aa:9c", "network": {"id": "ea0d99db-659f-41e4-a84a-ff4bc5c94fc0", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1232755130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60eb74f0bbe44955a92f1ee0b6099ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2d131b-7d", "ovs_interfaceid": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1666.636262] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Acquired lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.636459] env[68673]: DEBUG nova.network.neutron [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Refreshing network info cache for port 6c2d131b-7def-40d2-9f0a-fa89d8eba8a8 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1666.637466] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:aa:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'6c2d131b-7def-40d2-9f0a-fa89d8eba8a8', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1666.644944] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Creating folder: Project (60eb74f0bbe44955a92f1ee0b6099ec7). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1666.647977] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32e4f2f9-89da-4bf9-8eb2-313cd4993777 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.658996] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Created folder: Project (60eb74f0bbe44955a92f1ee0b6099ec7) in parent group-v685311. [ 1666.659220] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Creating folder: Instances. Parent ref: group-v685405. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1666.659420] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24146718-ee92-4bc2-9c27-5b7fc719165c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.667761] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Created folder: Instances in parent group-v685405. [ 1666.667983] env[68673]: DEBUG oslo.service.loopingcall [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.668171] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1666.668445] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-293098a0-a612-42df-8967-5754e60d6c92 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.686926] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1666.686926] env[68673]: value = "task-3433585" [ 1666.686926] env[68673]: _type = "Task" [ 1666.686926] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.697163] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433585, 'name': CreateVM_Task} progress is 0%. 
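
[editor's note] CreateVM_Task above is awaited by repeatedly polling the task object and logging its progress until it completes. A minimal sketch of such a poll loop, assuming a hypothetical get_task_info callable rather than oslo.vmware's real session API:

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        """Poll a vSphere-style task until it succeeds or errors (illustrative)."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # assumed to expose .state, .error, .result
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            time.sleep(interval)  # 'queued'/'running': keep polling
        raise TimeoutError('task did not complete within %.0fs' % timeout)
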
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.910805] env[68673]: DEBUG nova.network.neutron [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Updated VIF entry in instance network info cache for port 6c2d131b-7def-40d2-9f0a-fa89d8eba8a8. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1666.911181] env[68673]: DEBUG nova.network.neutron [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Updating instance_info_cache with network_info: [{"id": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "address": "fa:16:3e:87:aa:9c", "network": {"id": "ea0d99db-659f-41e4-a84a-ff4bc5c94fc0", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1232755130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60eb74f0bbe44955a92f1ee0b6099ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2d131b-7d", "ovs_interfaceid": "6c2d131b-7def-40d2-9f0a-fa89d8eba8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.921851] env[68673]: DEBUG oslo_concurrency.lockutils [req-d9961c60-6d4f-4dae-8241-05edd9ed6e49 req-1b3b7c1e-576c-4011-9cf8-b3bad9e9d79f service nova] Releasing lock "refresh_cache-3352e87c-38dd-4bfa-937c-644abc30cf76" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.196897] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433585, 'name': CreateVM_Task, 'duration_secs': 0.284247} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.197121] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1667.197708] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.197874] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.198211] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1667.198444] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88634d53-ccf6-4445-a767-4fdf63d24dd2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.202722] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for the task: (returnval){ [ 1667.202722] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52660fae-50ba-fe1a-d7d1-64f27e043a1d" [ 1667.202722] env[68673]: _type = "Task" [ 1667.202722] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.209654] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52660fae-50ba-fe1a-d7d1-64f27e043a1d, 'name': SearchDatastore_Task} progress is 0%. 
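
[editor's note] The lock records around the devstack-image-cache_base path above show a check-then-fetch image cache serialized by a per-image lock: acquire the lock for the image id, search the datastore for a cached copy, and download only on a miss. A minimal sketch of that pattern; image_lock, ensure_cached, and fetch_image are illustrative names, not Nova's real helpers:

    import os
    import threading
    from contextlib import contextmanager

    _locks = {}
    _guard = threading.Lock()

    @contextmanager
    def image_lock(image_id):
        # One lock per image id, like "[datastore1] devstack-image-cache_base/<id>"
        # above; _guard only protects the lock table itself.
        with _guard:
            lock = _locks.setdefault(image_id, threading.Lock())
        with lock:
            yield

    def ensure_cached(image_id, cache_dir, fetch_image):
        path = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        with image_lock(image_id):
            if not os.path.exists(path):     # SearchDatastore_Task analogue
                fetch_image(image_id, path)  # fetch only on a cache miss
        return path
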
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.712969] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.712969] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1667.712969] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.783886] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.783886] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1668.778748] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1669.783586] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.783622] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.783902] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1670.783902] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1670.805255] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.805440] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.805559] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.805682] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.805801] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.805917] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.806043] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.806162] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.806276] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.806390] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1670.806499] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
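
[editor's note] The heal task above rebuilds its candidate list and skips every instance still in the Building state, ending with nothing to refresh. A minimal sketch of that selection step, assuming an illustrative instance object with a vm_state attribute:

    def instances_to_heal(instances):
        candidates = []
        for inst in instances:
            if inst.vm_state == 'building':
                # Mirrors "Skipping network cache update ... because it is Building."
                continue
            candidates.append(inst)
        return candidates  # empty list -> "Didn't find any instances ..."
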
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1673.783905] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1674.784589] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1674.795375] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.795627] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.795793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.795947] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1674.797382] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e649179-2447-4573-9488-170ad8c5638a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.806388] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef42eb8-a5f1-422a-9944-213b67e8371a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.821938] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c9bfec-ab1a-4f56-984c-f77a1eab548a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.831530] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e2202f-c412-436f-85a9-ab9dd564ceb3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.863991] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1674.864198] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.864404] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.979161] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979427] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979508] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979613] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979738] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979853] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.979968] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.980097] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.980212] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.980569] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1674.993138] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.003731] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1d23d68-3a7f-4f5d-a13f-22f77917dd6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.014880] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
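
[editor's note] The audit above walks the placement allocations for this node and sorts them into instances actively managed on this host (allocations left in use) versus instances scheduled here but not yet started (allocation healing skipped). A minimal sketch of that classification; the function and its arguments are illustrative, not the resource tracker's actual code:

    def sort_allocations(allocations, tracked, scheduled):
        """Classify placement allocations against this compute host."""
        orphans = {}
        for uuid, resources in allocations.items():
            if uuid in tracked:
                continue  # actively managed here; allocation is legitimate
            if uuid in scheduled:
                continue  # scheduled here but not yet started; skip healing
            orphans[uuid] = resources  # candidate for cleanup (not shown above)
        return orphans
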
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.015171] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1675.015261] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1675.216564] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb48ce37-b297-4ea0-80b3-5cf70043256c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.224033] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cc9768-0f50-4e33-88aa-a53a28ce7c82 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.256384] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b121938-e031-4591-9bf8-adc31e2fd256 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.263329] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10eae1d-3693-40f3-80bb-9539b3b563f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.277139] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.285788] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1675.300007] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1675.300224] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.436s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.296144] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.326823] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.326823] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.725255] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.725519] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.782782] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.896051] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "3352e87c-38dd-4bfa-937c-644abc30cf76" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.017670] env[68673]: WARNING oslo_vmware.rw_handles [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1714.017670] env[68673]: ERROR
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.017670] env[68673]: ERROR oslo_vmware.rw_handles [ 1714.018592] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1714.020578] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1714.020839] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Copying Virtual Disk [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/f5d0b5a1-26d0-4dc3-bd5d-82f9202993cf/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1714.021167] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a13a88f7-1977-48d8-8b14-1b816b31f8b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.029314] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for the task: (returnval){ [ 1714.029314] env[68673]: value = "task-3433586" [ 1714.029314] env[68673]: _type = "Task" [ 1714.029314] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.037071] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Task: {'id': task-3433586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.540035] env[68673]: DEBUG oslo_vmware.exceptions [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Fault InvalidArgument not matched. 
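
[editor's note] "Fault InvalidArgument not matched." above means the fault name returned by vCenter had no registered exception class and fell through to a generic VimFaultException. A minimal sketch of that lookup; the registry and the VimFaultException class here are illustrative stand-ins, not oslo.vmware's actual get_fault_class implementation:

    class VimFaultException(Exception):
        """Generic fallback carrying the raw fault names (illustrative)."""

        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}  # e.g. {'FileNotFound': FileNotFoundError, ...}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            # "Fault <name> not matched": fall back to the generic exception,
            # preserving the fault name for callers to inspect.
            return VimFaultException([fault_name], message)
        return cls(message)
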
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1714.540188] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.540725] env[68673]: ERROR nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1714.540725] env[68673]: Faults: ['InvalidArgument'] [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Traceback (most recent call last): [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] yield resources [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self.driver.spawn(context, instance, image_meta, [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self._fetch_image_if_missing(context, vi) [ 1714.540725] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] image_cache(vi, tmp_image_ds_loc) [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] vm_util.copy_virtual_disk( [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] session._wait_for_task(vmdk_copy_task) [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return self.wait_for_task(task_ref) [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return evt.wait() [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] result = hub.switch() [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1714.541141] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return self.greenlet.switch() [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self.f(*self.args, **self.kw) [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] raise exceptions.translate_fault(task_info.error) [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Faults: ['InvalidArgument'] [ 1714.541488] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] [ 1714.541488] env[68673]: INFO nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Terminating instance [ 1714.542666] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.542888] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1714.543123] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71337d1b-1704-44ac-a71a-e2bdd047b737 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.545626] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1714.545738] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1714.546462] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcab6c41-bf87-4c7f-a9eb-f32820d70cfc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.553333] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1714.553568] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a285d195-3687-4470-ab07-27de53e2cb0f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.555726] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1714.555932] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1714.556940] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d618b8-4b53-4a5c-8aa3-7ea4a95a0dc8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.564291] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1714.564291] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e1f460-1a7a-9f18-6430-b5a65c2bd725" [ 1714.564291] env[68673]: _type = "Task" [ 1714.564291] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.569601] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e1f460-1a7a-9f18-6430-b5a65c2bd725, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.622138] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1714.622358] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1714.622533] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Deleting the datastore file [datastore1] 074705bc-7378-43f8-8ed3-b3b5ea38f50a {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.622804] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f5888d3-3269-47e0-be5d-3dfd8073e9e8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.629737] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for the task: (returnval){ [ 1714.629737] env[68673]: value = "task-3433588" [ 1714.629737] env[68673]: _type = "Task" [ 1714.629737] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.637552] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Task: {'id': task-3433588, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.073844] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1715.074226] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.074374] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a11c55b-b3cc-4bdb-9f19-66d5df4f40f3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.084979] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.085195] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Fetch image to [datastore1] vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1715.085366] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1715.086091] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d67e87e-a779-4ff1-83ad-62c791281af5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.092363] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc3b110-651f-499a-b7d6-fdc3a518cfa9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.100955] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0533073-c03e-44f5-9754-18ae6bc28288 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.133942] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211642ef-c2d2-4067-aecf-d623cac72335 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.142483] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-392f8e98-e2b1-432a-99b6-bb2747e163fc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.144166] env[68673]: DEBUG oslo_vmware.api [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Task: {'id': task-3433588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079669} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.144401] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1715.144579] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1715.144746] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1715.144916] env[68673]: INFO nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Took 0.60 seconds to destroy the instance on the hypervisor. 
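
[editor's note] The destroy path above runs UnregisterVM, then DeleteDatastoreFile_Task on the instance directory, and finally reports the elapsed time ("Took 0.60 seconds ..."). A minimal sketch of that timed sequence; vm and datastore are hypothetical wrapper objects, not Nova's vmops API:

    import time

    def destroy_on_hypervisor(vm, datastore):
        start = time.monotonic()
        vm.unregister()                      # VirtualMachine.UnregisterVM analogue
        datastore.delete_path(vm.directory)  # DeleteDatastoreFile_Task analogue
        print('Took %.2f seconds to destroy the instance on the hypervisor.'
              % (time.monotonic() - start))
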
[ 1715.150812] env[68673]: DEBUG nova.compute.claims [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1715.150981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.151213] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.166604] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1715.222569] env[68673]: DEBUG oslo_vmware.rw_handles [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1715.285413] env[68673]: DEBUG oslo_vmware.rw_handles [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1715.285624] env[68673]: DEBUG oslo_vmware.rw_handles [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
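
[editor's note] Closing the write handle above means finishing the upload and reading the server's response; the WARNING traceback earlier in this excerpt shows the ESX side can drop the connection before answering (http.client.RemoteDisconnected raised from getresponse()). A minimal sketch of a close that tolerates that, using only the standard library; the connection setup and chunking are assumed to have happened elsewhere:

    import http.client

    def close_write_handle(conn: http.client.HTTPSConnection):
        try:
            resp = conn.getresponse()  # server normally acknowledges the upload
            resp.read()
        except http.client.RemoteDisconnected:
            # "Remote end closed connection without response": the upload may
            # still have landed, so treat this as a warning, not a failure.
            pass
        finally:
            conn.close()
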
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1715.400466] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2ca00f-205d-4de0-b7d3-9ec741320b87 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.407965] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8074377-4f05-471a-a4e2-0b7b1b752333 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.438645] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76d0661-141c-454f-ada9-20096f015f50 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.445949] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0c77cf-b27a-4b80-a0f0-44e38784d9f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.458558] env[68673]: DEBUG nova.compute.provider_tree [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.467341] env[68673]: DEBUG nova.scheduler.client.report [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1715.483370] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.483909] env[68673]: ERROR nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.483909] env[68673]: Faults: ['InvalidArgument'] [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Traceback (most recent call last): [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1715.483909] 
env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self.driver.spawn(context, instance, image_meta, [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self._fetch_image_if_missing(context, vi) [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] image_cache(vi, tmp_image_ds_loc) [ 1715.483909] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] vm_util.copy_virtual_disk( [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] session._wait_for_task(vmdk_copy_task) [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return self.wait_for_task(task_ref) [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return evt.wait() [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] result = hub.switch() [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] return self.greenlet.switch() [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1715.484274] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] self.f(*self.args, **self.kw) [ 1715.484589] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1715.484589] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] raise exceptions.translate_fault(task_info.error) [ 1715.484589] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.484589] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Faults: ['InvalidArgument'] [ 1715.484589] env[68673]: ERROR nova.compute.manager [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] [ 1715.484589] env[68673]: DEBUG nova.compute.utils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1715.485936] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Build of instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a was re-scheduled: A specified parameter was not correct: fileType [ 1715.485936] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1715.486320] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1715.486498] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1715.486667] env[68673]: DEBUG nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1715.486834] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1715.905534] env[68673]: DEBUG nova.network.neutron [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.915519] env[68673]: INFO nova.compute.manager [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Took 0.43 seconds to deallocate network for instance. [ 1716.019170] env[68673]: INFO nova.scheduler.client.report [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Deleted allocations for instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a [ 1716.043045] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b12257d9-8822-496a-9af3-bff02fbe7731 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.762s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.044367] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 422.847s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.044597] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Acquiring lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.044803] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.044967] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.047056] env[68673]: INFO nova.compute.manager [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Terminating instance [ 1716.048714] env[68673]: DEBUG nova.compute.manager [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1716.048904] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1716.049428] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bd82165-5db4-49cf-a0d5-ebd55f78fabd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.054286] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1716.060878] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904a35d7-41e5-4c91-8b95-964f8b841a55 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.092614] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 074705bc-7378-43f8-8ed3-b3b5ea38f50a could not be found. 
[ 1716.092938] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1716.092994] env[68673]: INFO nova.compute.manager [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1716.093251] env[68673]: DEBUG oslo.service.loopingcall [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.095919] env[68673]: DEBUG nova.compute.manager [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1716.096039] env[68673]: DEBUG nova.network.neutron [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1716.110207] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.110447] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.112035] env[68673]: INFO nova.compute.claims [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.122185] env[68673]: DEBUG nova.network.neutron [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.141093] env[68673]: INFO nova.compute.manager [-] [instance: 074705bc-7378-43f8-8ed3-b3b5ea38f50a] Took 0.04 seconds to deallocate network for instance. 
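The "_deallocate_network_with_retries" record above shows the network teardown wrapped in an oslo.service looping call, so a transient Neutron failure does not abort the terminate. A rough sketch of that retry shape, assuming a deallocate() callable that raises on transient failure (the attempt count and interval here are illustrative, not Nova's configured values):

```python
import time


def deallocate_with_retries(deallocate, attempts=3, interval=1.0):
    # Keep calling deallocate() until it succeeds or attempts run out,
    # in the spirit of _deallocate_network_with_retries above.
    for attempt in range(1, attempts + 1):
        try:
            deallocate()
            return
        except Exception:
            if attempt == attempts:
                raise  # out of retries; let the caller see the failure
            time.sleep(interval)
```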
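The lockutils records throughout this excerpt also pair every "acquired ... waited Ns" line with a "released ... held Ns" line (the build lock above was held 619.762s while do_terminate_instance waited 422.847s on it). A minimal sketch of that waited/held accounting around a plain threading.Lock; the real oslo_concurrency fair locks do more, and this only reproduces the timing lines:

```python
import threading
import time


class TimedLock:
    # Context manager that reports how long callers waited for the lock
    # and how long they held it, echoing the lockutils records above.
    def __init__(self, name):
        self.name = name
        self._lock = threading.Lock()
        self._acquired_at = None

    def __enter__(self):
        start = time.monotonic()
        self._lock.acquire()
        self._acquired_at = time.monotonic()
        print('Lock "%s" acquired :: waited %.3fs'
              % (self.name, self._acquired_at - start))
        return self

    def __exit__(self, exc_type, exc, tb):
        held = time.monotonic() - self._acquired_at
        self._lock.release()
        print('Lock "%s" released :: held %.3fs' % (self.name, held))
```

Used as `with TimedLock("compute_resources"): ...`, this emits the same waited/held pair seen around ResourceTracker.instance_claim in the surrounding records.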
[ 1716.237273] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9989a5b6-670d-4ef1-b9d7-8430581a9ff8 tempest-VolumesAdminNegativeTest-1457270426 tempest-VolumesAdminNegativeTest-1457270426-project-member] Lock "074705bc-7378-43f8-8ed3-b3b5ea38f50a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.323478] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a922562b-c0db-4a39-94ba-15e09ec813ec {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.331098] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42afd00c-913f-4509-a85d-b4028c970125 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.360074] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5390657f-eaef-4d23-9d5f-cf4504d3d994 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.367059] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02ca82b-6b1f-4a1f-85f6-088a010ee25c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.379569] env[68673]: DEBUG nova.compute.provider_tree [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.388296] env[68673]: DEBUG nova.scheduler.client.report [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1716.401618] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.402046] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1716.434946] env[68673]: DEBUG nova.compute.utils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1716.436270] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1716.436363] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1716.448097] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1716.492197] env[68673]: DEBUG nova.policy [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a135381c6987442cad21b1f74d5a9e34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8ecabf65f524fb5bfcb60401c45db96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1716.510362] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1716.535609] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1716.535851] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1716.536044] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1716.536242] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1716.536388] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1716.536532] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1716.536734] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1716.536892] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1716.537099] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 
tempest-ServersTestJSON-1463022821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1716.537277] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1716.537450] env[68673]: DEBUG nova.virt.hardware [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1716.538321] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aff8c19-3a19-4cf3-b096-8a3a9acc5f88 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.546397] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f73208-fb11-4c62-9d5a-9d59b59c0fcb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.800191] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Successfully created port: 8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.562573] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Successfully updated port: 8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.575406] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.575556] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.575704] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1717.639544] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1717.807901] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Updating instance_info_cache with network_info: [{"id": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "address": "fa:16:3e:5e:23:b8", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8956aee2-3f", "ovs_interfaceid": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.822062] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.822062] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance network_info: |[{"id": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "address": "fa:16:3e:5e:23:b8", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8956aee2-3f", "ovs_interfaceid": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1717.822392] env[68673]: 
DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:23:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8956aee2-3f5d-4af5-9436-62ff4e033ac2', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1717.829952] env[68673]: DEBUG oslo.service.loopingcall [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.830472] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1717.830696] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41a66561-143e-481e-91e7-612c92ea2885 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.851467] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1717.851467] env[68673]: value = "task-3433589" [ 1717.851467] env[68673]: _type = "Task" [ 1717.851467] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.859214] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433589, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.938659] env[68673]: DEBUG nova.compute.manager [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Received event network-vif-plugged-8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1717.938738] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Acquiring lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.938942] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.939108] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.939291] env[68673]: DEBUG nova.compute.manager [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] No waiting events found dispatching network-vif-plugged-8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1717.939454] env[68673]: WARNING nova.compute.manager [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Received unexpected event network-vif-plugged-8956aee2-3f5d-4af5-9436-62ff4e033ac2 for instance with vm_state building and task_state spawning. [ 1717.939613] env[68673]: DEBUG nova.compute.manager [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Received event network-changed-8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1717.939764] env[68673]: DEBUG nova.compute.manager [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Refreshing instance network info cache due to event network-changed-8956aee2-3f5d-4af5-9436-62ff4e033ac2. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1717.939948] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Acquiring lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.940088] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Acquired lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.940246] env[68673]: DEBUG nova.network.neutron [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Refreshing network info cache for port 8956aee2-3f5d-4af5-9436-62ff4e033ac2 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1718.233467] env[68673]: DEBUG nova.network.neutron [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Updated VIF entry in instance network info cache for port 8956aee2-3f5d-4af5-9436-62ff4e033ac2. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1718.233939] env[68673]: DEBUG nova.network.neutron [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Updating instance_info_cache with network_info: [{"id": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "address": "fa:16:3e:5e:23:b8", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8956aee2-3f", "ovs_interfaceid": "8956aee2-3f5d-4af5-9436-62ff4e033ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.244663] env[68673]: DEBUG oslo_concurrency.lockutils [req-1a062e7d-272a-471c-b9e4-6e07c4e717ec req-abb46c24-0d9c-4d8b-891f-acf73e8e847b service nova] Releasing lock "refresh_cache-d79f254c-7c00-4cf8-85ac-6db513533da3" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.360893] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433589, 'name': CreateVM_Task, 'duration_secs': 0.314356} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.361061] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1718.361703] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.361875] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.362219] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.362447] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd8487c8-ae47-4f45-a030-ed72a46bcf1f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.366476] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1718.366476] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52d4b3d5-c761-f3ff-45d2-1fce40839782" [ 1718.366476] env[68673]: _type = "Task" [ 1718.366476] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.373471] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52d4b3d5-c761-f3ff-45d2-1fce40839782, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.878116] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.878446] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.878609] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.779852] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.783577] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.783577] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1729.785171] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.784339] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.784696] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1731.784696] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1731.807165] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807320] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807434] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807566] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807705] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807826] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.807945] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.808096] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.808222] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.808337] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1731.808452] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1735.784046] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.784385] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.784470] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.796406] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.796645] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.796824] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.796981] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1735.798071] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0de46e-772b-4a82-9077-72ca1f23c398 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.806904] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248ba9dc-fb59-421a-afd1-1580f8acf5b1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.821133] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d178df-4130-437d-958e-b2320bbadda1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.827762] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8fbf60-5438-43b7-adf2-577d5d757062 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.856393] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180926MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1735.856564] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.856738] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.938872] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c12e8044-6e77-44a6-866e-1036f69113a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939066] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939202] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939324] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939441] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939559] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939674] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939796] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.939964] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.940107] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1735.951290] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1d23d68-3a7f-4f5d-a13f-22f77917dd6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1735.979855] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1735.990649] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1735.990901] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1735.991034] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1736.140649] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7ffa07-fb34-4519-87a3-82c2a1c8ea13 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.147814] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f8da49-9f2f-4545-a7aa-4ba1be2474d3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.177415] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1ec98b-0ce0-4530-a62a-f196b0454403 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.184891] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2827e323-9c28-4254-b899-01debcd43d9e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.197714] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1736.206407] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1736.219955] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1736.220171] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.219617] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.783174] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.261551] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "d79f254c-7c00-4cf8-85ac-6db513533da3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.027877] env[68673]: WARNING oslo_vmware.rw_handles [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1762.027877] env[68673]: ERROR oslo_vmware.rw_handles [ 1762.028822] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1762.031350] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1762.031649] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/3e70c6eb-76dd-459b-ae60-020d9f931199/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1762.032053] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c81c12d-8c71-4172-b6d9-255fa5f79cb0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.041297] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1762.041297] env[68673]: value = "task-3433590" [ 1762.041297] env[68673]: _type = "Task" [ 1762.041297] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.049428] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.550871] env[68673]: DEBUG oslo_vmware.exceptions [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1762.551185] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.551748] env[68673]: ERROR nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1762.551748] env[68673]: Faults: ['InvalidArgument'] [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Traceback (most recent call last): [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] yield resources [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self.driver.spawn(context, instance, image_meta, [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 
1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self._fetch_image_if_missing(context, vi) [ 1762.551748] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] image_cache(vi, tmp_image_ds_loc) [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] vm_util.copy_virtual_disk( [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] session._wait_for_task(vmdk_copy_task) [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return self.wait_for_task(task_ref) [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return evt.wait() [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] result = hub.switch() [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1762.552152] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return self.greenlet.switch() [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self.f(*self.args, **self.kw) [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] raise exceptions.translate_fault(task_info.error) [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: 
c12e8044-6e77-44a6-866e-1036f69113a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Faults: ['InvalidArgument'] [ 1762.552567] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] [ 1762.552567] env[68673]: INFO nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Terminating instance [ 1762.553683] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.553890] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1762.554146] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c02dff24-4b39-41a3-8152-4798290ff6e2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.556479] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1762.556668] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1762.557402] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa70ee53-b299-41f4-8f58-c92887c08b65 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.563791] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1762.563984] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c619bf9-eeef-4df8-9e54-c1f60ce52424 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.565992] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1762.566186] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1762.567108] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3895a67-4d43-46ab-a38e-ce07790b90ea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.571643] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 1762.571643] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52c039f3-14c5-9e18-f686-0348301ba56e" [ 1762.571643] env[68673]: _type = "Task" [ 1762.571643] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.578275] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52c039f3-14c5-9e18-f686-0348301ba56e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.639487] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1762.639709] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1762.639890] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleting the datastore file [datastore1] c12e8044-6e77-44a6-866e-1036f69113a2 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.640189] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f221373d-905f-4774-9a73-985426bce039 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.647379] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1762.647379] env[68673]: value = "task-3433592" [ 1762.647379] env[68673]: _type = "Task" [ 1762.647379] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.654806] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.083053] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1763.083053] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.083053] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64e813ef-8acd-4677-be97-08e4d628f9e5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.093266] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.093449] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Fetch image to [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1763.093616] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1763.094361] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355d5a32-86c5-47e7-8706-2bd59d9bb9aa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.100564] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37692da8-e114-4125-a9ac-15a210c71eec {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.109314] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bd1cb4-bd0a-45eb-a3aa-0b6a65c6a4c3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.141027] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fecd6429-72bf-484a-9fe3-7210b05ce618 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.146115] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5c28e1e0-355e-4c5d-9d74-113b746ac5f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.155199] env[68673]: DEBUG oslo_vmware.api [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075149} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.155262] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.156031] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1763.156031] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1763.156031] env[68673]: INFO nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Took 0.60 seconds to destroy the instance on the hypervisor. 
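A note on the task records above: every vCenter operation here (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task) returns a task object that oslo.vmware polls until it reaches a terminal state; on error, the VIM fault is translated into a Python exception, which is how the "A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" traceback above is produced. A minimal sketch of that polling pattern, with simplified, hypothetical names (read_task_info and the dict shape are illustrative, not the actual oslo.vmware internals):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(read_task_info, poll_interval=0.5):
        # Poll task.info until it leaves the transient states; this loop is
        # what emits the "Task: {'id': task-3433592, ...} progress is 0%"
        # DEBUG records seen above.
        while True:
            info = read_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Terminal failure: translate the VIM fault into an
                # exception, as in "raise exceptions.translate_fault(...)".
                raise VimFaultException(info['faults'], info['message'])
            time.sleep(poll_interval)  # 'queued' / 'running': keep polling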
[ 1763.158654] env[68673]: DEBUG nova.compute.claims [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1763.158654] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.158654] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.169510] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1763.222055] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1763.282195] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1763.282429] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1763.395335] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84047f7a-0ceb-4b7c-8cf4-58ef642b5f41 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.402146] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b265db7f-fa60-4147-9470-7df84f4b778e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.431343] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f766194-8cc6-458c-81f3-27f694d4784c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.438009] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9730a1da-f4d3-4838-ac2b-8277cf9ddf38 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.450518] env[68673]: DEBUG nova.compute.provider_tree [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1763.459167] env[68673]: DEBUG nova.scheduler.client.report [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1763.472259] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.472765] env[68673]: ERROR nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1763.472765] env[68673]: Faults: ['InvalidArgument'] [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Traceback (most recent call last): [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: 
c12e8044-6e77-44a6-866e-1036f69113a2] self.driver.spawn(context, instance, image_meta, [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self._fetch_image_if_missing(context, vi) [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] image_cache(vi, tmp_image_ds_loc) [ 1763.472765] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] vm_util.copy_virtual_disk( [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] session._wait_for_task(vmdk_copy_task) [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return self.wait_for_task(task_ref) [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return evt.wait() [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] result = hub.switch() [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] return self.greenlet.switch() [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1763.473184] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] self.f(*self.args, **self.kw) [ 1763.473823] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1763.473823] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] raise exceptions.translate_fault(task_info.error) [ 1763.473823] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1763.473823] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Faults: ['InvalidArgument'] [ 1763.473823] env[68673]: ERROR nova.compute.manager [instance: c12e8044-6e77-44a6-866e-1036f69113a2] [ 1763.473823] env[68673]: DEBUG nova.compute.utils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1763.474767] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Build of instance c12e8044-6e77-44a6-866e-1036f69113a2 was re-scheduled: A specified parameter was not correct: fileType [ 1763.474767] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1763.475150] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1763.475324] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1763.475492] env[68673]: DEBUG nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1763.475654] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1763.748549] env[68673]: DEBUG nova.network.neutron [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.758876] env[68673]: INFO nova.compute.manager [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Took 0.28 seconds to deallocate network for instance. [ 1763.854131] env[68673]: INFO nova.scheduler.client.report [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted allocations for instance c12e8044-6e77-44a6-866e-1036f69113a2 [ 1763.879660] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bc7a686e-5513-4a5a-bc6b-464b0b7bfa06 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 610.511s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.880636] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.903s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.880859] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.881074] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1763.881240] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.884179] env[68673]: INFO nova.compute.manager [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Terminating instance [ 1763.887954] env[68673]: DEBUG nova.compute.manager [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1763.888167] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1763.888429] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6feda47-2c67-4162-9bc6-91524a61fa94 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.896097] env[68673]: DEBUG nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1763.901378] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef101385-09f9-4b13-9883-89f6eb50d86d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.931050] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c12e8044-6e77-44a6-866e-1036f69113a2 could not be found. [ 1763.931273] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1763.931473] env[68673]: INFO nova.compute.manager [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Took 0.04 seconds to destroy the instance on the hypervisor. 
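The paired "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns" and "Lock ... "released" ... :: held Ns" records that bracket the terminate path above are emitted by oslo.concurrency itself whenever code enters and leaves a synchronized section. A short usage sketch; the lock names are taken from this log, the function body is illustrative only:

    from oslo_concurrency import lockutils

    # Decorator form: the resource tracker serializes its updates on the
    # "compute_resources" lock, producing the waited/held DEBUG timings.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # recompute per-node usage while holding the lock

    # Context-manager form, as with the per-instance "-events" lock used
    # by clear_events_for_instance above:
    with lockutils.lock('c12e8044-6e77-44a6-866e-1036f69113a2-events'):
        pass  # clear pending external events for the instance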
[ 1763.931674] env[68673]: DEBUG oslo.service.loopingcall [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.933858] env[68673]: DEBUG nova.compute.manager [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1763.933961] env[68673]: DEBUG nova.network.neutron [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1763.947804] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.948060] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.949527] env[68673]: INFO nova.compute.claims [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1763.969935] env[68673]: DEBUG nova.network.neutron [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.983350] env[68673]: INFO nova.compute.manager [-] [instance: c12e8044-6e77-44a6-866e-1036f69113a2] Took 0.05 seconds to deallocate network for instance. [ 1763.985737] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.038s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.986400] env[68673]: DEBUG nova.compute.utils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Instance f1d23d68-3a7f-4f5d-a13f-22f77917dd6e could not be found. {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1763.998447] env[68673]: DEBUG nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Instance disappeared during build. 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1763.998625] env[68673]: DEBUG nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1763.998890] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "refresh_cache-f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.999104] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "refresh_cache-f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.999231] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1764.010936] env[68673]: DEBUG nova.compute.utils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Can not refresh info_cache because instance was not found {{(pid=68673) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1764.029317] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1764.076873] env[68673]: DEBUG oslo_concurrency.lockutils [None req-22b3de40-ba47-48d2-9e51-b1cb61c5d4f6 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "c12e8044-6e77-44a6-866e-1036f69113a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.196s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.201481] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.231736] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "refresh_cache-f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.231933] env[68673]: DEBUG nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1764.232137] env[68673]: DEBUG nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1764.232305] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1764.254848] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1764.261727] env[68673]: DEBUG nova.network.neutron [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.270068] env[68673]: INFO nova.compute.manager [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: f1d23d68-3a7f-4f5d-a13f-22f77917dd6e] Took 0.04 seconds to deallocate network for instance. 
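Two pieces of arithmetic tie the placement records above together. On the supply side, placement's usable capacity per resource class is (total - reserved) * allocation_ratio over the reported inventory. On the demand side, each tempest instance claims {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and with ten active claims plus the 512MB host reservation counted as used, the earlier "Final resource view" (used_ram=1792MB used_disk=10GB used_vcpus=10) falls out directly. A quick check, with all numbers copied from the log:

    # Inventory reported for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f'{rc}: {capacity:g} allocatable')  # 192, 196078 and 400

    claim = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}  # per m1.nano instance
    n = 10                                               # instances on this node
    used_ram = n * claim['MEMORY_MB'] + inventory['MEMORY_MB']['reserved']
    print(used_ram, n * claim['DISK_GB'], n * claim['VCPU'])  # 1792 10 10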
[ 1764.311105] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ed3950ae-0fba-4966-9c95-85f60b1c87ab tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "f1d23d68-3a7f-4f5d-a13f-22f77917dd6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.698s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.320356] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1764.367147] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.367406] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.368958] env[68673]: INFO nova.compute.claims [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1764.544369] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e06faa9-b905-4ab9-8e64-ead2bbc4a740 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.552149] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412cdf9a-1448-420a-8ec3-d139e3595f9c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.582661] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e3e4d6-e798-4e5c-a8f4-05e7c10b622b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.589886] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82984e94-df27-45ea-a553-2e23c0410c28 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.603008] env[68673]: DEBUG nova.compute.provider_tree [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.611439] env[68673]: DEBUG nova.scheduler.client.report 
[None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1764.628871] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.629344] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1764.664711] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "16320084-1a09-493a-8ff2-389da64b92a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.664933] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.665917] env[68673]: DEBUG nova.compute.utils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1764.667267] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Allocating IP information in the background.
{{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1764.667428] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1764.679038] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1764.753116] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1764.767009] env[68673]: DEBUG nova.policy [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f617cfb6919840ad99e1320228344b18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c73dd528430445bb8717487ffd7dd780', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1764.781758] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1764.781981] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1764.782157] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] 
Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1764.782341] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1764.782485] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1764.782629] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1764.782871] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1764.783239] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1764.783335] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1764.783522] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1764.783732] env[68673]: DEBUG nova.virt.hardware [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1764.784618] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ad6112-7706-4222-95aa-b2979ad6032c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.792899] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ed8149-a9c8-416c-b5ef-338c69ffcbd7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.174883] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af 
tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Successfully created port: 15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1766.012138] env[68673]: DEBUG nova.compute.manager [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Received event network-vif-plugged-15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1766.012421] env[68673]: DEBUG oslo_concurrency.lockutils [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] Acquiring lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.012642] env[68673]: DEBUG oslo_concurrency.lockutils [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.012737] env[68673]: DEBUG oslo_concurrency.lockutils [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.012903] env[68673]: DEBUG nova.compute.manager [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] No waiting events found dispatching network-vif-plugged-15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1766.013636] env[68673]: WARNING nova.compute.manager [req-10465503-8be6-4d90-820a-e5314c267e66 req-f433f117-4d03-481c-b8e4-c66dd4f89843 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Received unexpected event network-vif-plugged-15863009-b6e4-43a9-a2d3-14581d3905f4 for instance with vm_state building and task_state spawning.
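[editor's note] The WARNING just above fires because Neutron's network-vif-plugged event reached the compute manager before the spawning thread had registered a waiter for it, so there was nothing to dispatch the event to. A hedged sketch of that waiter pattern with hypothetical names; this illustrates the race, it is not Nova's implementation:

    import threading

    waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def external_instance_event(instance_uuid, event_name):
        # Delivery side: if nobody has registered for this event yet, it is
        # logged as unexpected, exactly as in the WARNING above.
        evt = waiters.get((instance_uuid, event_name))
        if evt is None:
            print('WARNING: unexpected event %s for %s' % (event_name, instance_uuid))
        else:
            evt.set()

    def wait_for_vif_plug(instance_uuid, event_name, timeout=300):
        # Waiting side: register first, then trigger the plug and block.
        evt = waiters.setdefault((instance_uuid, event_name), threading.Event())
        if not evt.wait(timeout):
            raise TimeoutError('%s not received for %s' % (event_name, instance_uuid))

    # Tiny demo: the event arrives before anyone waits -> unexpected.
    external_instance_event('3e1b47d8', 'network-vif-plugged')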
[ 1766.016372] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Successfully updated port: 15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1766.032351] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.032504] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.032651] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1766.090043] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1766.278561] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Updating instance_info_cache with network_info: [{"id": "15863009-b6e4-43a9-a2d3-14581d3905f4", "address": "fa:16:3e:b5:cc:4c", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15863009-b6", "ovs_interfaceid": "15863009-b6e4-43a9-a2d3-14581d3905f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.290711] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.290986] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance network_info: |[{"id": "15863009-b6e4-43a9-a2d3-14581d3905f4", "address": "fa:16:3e:b5:cc:4c", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15863009-b6", "ovs_interfaceid": "15863009-b6e4-43a9-a2d3-14581d3905f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1766.291408] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:cc:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15863009-b6e4-43a9-a2d3-14581d3905f4', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1766.299335] env[68673]: DEBUG oslo.service.loopingcall [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.299751] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1766.299975] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64cf8cb4-7ce1-427e-a81a-7ce6ba9646c8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.320252] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1766.320252] env[68673]: value = "task-3433593" [ 1766.320252] env[68673]: _type = "Task" [ 1766.320252] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.328463] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433593, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.830377] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433593, 'name': CreateVM_Task, 'duration_secs': 0.28513} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.830535] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1766.831149] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.831316] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.831642] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1766.831879] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe6fe15-ad49-4595-bd47-9eb5f5792970 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.839233] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 1766.839233] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521a71dd-a9e5-675f-9ff7-3c48c1f1756a" [ 1766.839233] env[68673]: _type = "Task" [ 1766.839233] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.847454] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521a71dd-a9e5-675f-9ff7-3c48c1f1756a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.350402] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.350683] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1767.350837] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.049145] env[68673]: DEBUG nova.compute.manager [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Received event network-changed-15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1768.049296] env[68673]: DEBUG nova.compute.manager [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Refreshing instance network info cache due to event network-changed-15863009-b6e4-43a9-a2d3-14581d3905f4. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1768.049485] env[68673]: DEBUG oslo_concurrency.lockutils [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] Acquiring lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.049627] env[68673]: DEBUG oslo_concurrency.lockutils [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] Acquired lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.049782] env[68673]: DEBUG nova.network.neutron [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Refreshing network info cache for port 15863009-b6e4-43a9-a2d3-14581d3905f4 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1768.379146] env[68673]: DEBUG nova.network.neutron [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Updated VIF entry in instance network info cache for port 15863009-b6e4-43a9-a2d3-14581d3905f4. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1768.379644] env[68673]: DEBUG nova.network.neutron [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Updating instance_info_cache with network_info: [{"id": "15863009-b6e4-43a9-a2d3-14581d3905f4", "address": "fa:16:3e:b5:cc:4c", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15863009-b6", "ovs_interfaceid": "15863009-b6e4-43a9-a2d3-14581d3905f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.389633] env[68673]: DEBUG oslo_concurrency.lockutils [req-ca350bbf-551b-4dd3-b8f2-63a1276777b5 req-fe8eeab8-26e8-4e75-85b2-2335a89fcfd1 service nova] Releasing lock "refresh_cache-3e1b47d8-e829-416d-baca-b15e3d0d358d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.799176] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.779426] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.783049] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.783162] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.783210] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1792.784671] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.785036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1792.785036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1792.808950] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.809263] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.809493] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.809708] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.809921] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.810149] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.810358] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.810560] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.810781] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.810994] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1792.811215] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1795.784256] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.805692] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.806043] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.816360] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.816564] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.816727] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.816879] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1795.817931] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7794ef2d-f97a-4912-a862-4508e2067fd9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.827757] env[68673]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ce485e-1633-42f6-b126-f73067a826eb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.841758] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe4bb57-a7f0-44a8-b24d-3c86200e5511 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.847791] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfe3363-79ca-46a4-a442-d7529960982b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.876158] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180930MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1795.876295] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.876482] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.023239] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance bb290679-267b-4dc2-8337-896d5208c6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.023420] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.023549] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.023669] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.023789] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.023910] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.024035] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.024157] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.024272] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.024385] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1796.035681] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1796.046138] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1796.046358] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1796.046547] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1796.061841] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1796.074987] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1796.075176] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1796.084751] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1796.101750] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1796.238084] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc52e68-a6f1-49de-87de-ecac689ee60c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.245736] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-008b55a6-4998-478c-a5d2-443d6463c324 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.276279] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16d5cc3-5983-43f7-ab3a-48bac847eaed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.284044] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e511bf37-a1dc-4817-bf25-549ab858c362 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.297080] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.305707] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1796.319333] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1796.319513] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.443s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.297581] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.783315] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.784100] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.791630] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.784320] 
env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.784630] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1806.793036] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.793036] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1806.802813] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1809.056550] env[68673]: WARNING oslo_vmware.rw_handles [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1809.056550] env[68673]: ERROR oslo_vmware.rw_handles [ 1809.057468] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1809.059385] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 
tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1809.059673] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Copying Virtual Disk [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/d824ed30-1e00-44bb-8ece-c3061cfda19d/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1809.059995] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3581f07-8327-41f7-95d5-8918cf054be2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.068423] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 1809.068423] env[68673]: value = "task-3433594" [ 1809.068423] env[68673]: _type = "Task" [ 1809.068423] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.079272] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.578563] env[68673]: DEBUG oslo_vmware.exceptions [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1809.578861] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.579434] env[68673]: ERROR nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1809.579434] env[68673]: Faults: ['InvalidArgument'] [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Traceback (most recent call last): [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] yield resources [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self.driver.spawn(context, instance, image_meta, [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self._fetch_image_if_missing(context, vi) [ 1809.579434] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] image_cache(vi, tmp_image_ds_loc) [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] vm_util.copy_virtual_disk( [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] session._wait_for_task(vmdk_copy_task) [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return self.wait_for_task(task_ref) [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return evt.wait() [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] result = hub.switch() [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1809.579806] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return self.greenlet.switch() [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self.f(*self.args, **self.kw) [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] raise exceptions.translate_fault(task_info.error) [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Faults: ['InvalidArgument'] [ 1809.580176] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] [ 1809.580176] env[68673]: INFO nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Terminating instance [ 1809.581361] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.582021] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.582021] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-524c038f-30fd-4613-a06e-5ab82e0cf926 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.585033] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1809.585250] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1809.585948] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6214f8-010f-41c3-8895-bc4e8834f51e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.592380] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1809.592683] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d1fe6b0-3616-40a6-951d-e6ef90d71c07 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.594759] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1809.594917] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1809.595963] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-264cf902-5db4-4180-ac41-cdf62f2e95f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.601496] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for the task: (returnval){ [ 1809.601496] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521577ff-ba48-8e0c-65bd-6dd76fc01aab" [ 1809.601496] env[68673]: _type = "Task" [ 1809.601496] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.607790] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521577ff-ba48-8e0c-65bd-6dd76fc01aab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.659730] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1809.659999] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1809.660207] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleting the datastore file [datastore1] bb290679-267b-4dc2-8337-896d5208c6cd {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1809.660500] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d1b7be9-1ca7-43ac-b586-b1fdcca6c4d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.666908] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 1809.666908] env[68673]: value = "task-3433596" [ 1809.666908] env[68673]: _type = "Task" [ 1809.666908] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.675597] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433596, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.111506] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1810.111808] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Creating directory with path [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1810.112019] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c939a6a-67e8-4797-a1b4-fc8e786c7544 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.123079] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Created directory with path [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.123079] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Fetch image to [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1810.123079] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1810.123707] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dce45d-08f0-4395-999d-0e33250d1bfe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.130061] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8880bc-9737-49a5-b783-847fe86b4c52 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.138712] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3c2848-1edf-4c44-8ff6-e8b9e9c659d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.171495] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2d194705-5deb-4563-9fe0-8306f6d722ed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.178362] env[68673]: DEBUG oslo_vmware.api [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077742} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.179787] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1810.179973] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1810.180164] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1810.180341] env[68673]: INFO nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Took 0.60 seconds to destroy the instance on the hypervisor. 
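The records above repeatedly poll vCenter tasks (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) until they reach a terminal state, and translate a failed task's fault list into an exception — which is how the "A specified parameter was not correct: fileType" traceback is raised. A minimal sketch of that poll-until-done pattern follows; get_task_info, the TaskInfo fields, and the fallback exception class here are illustrative stand-ins, not the real oslo_vmware/pyVmomi API.

import time

class VimFaultException(Exception):
    """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info() is assumed to return an object with .state
    # ('running' | 'success' | 'error'), and on error an .error carrying
    # .faults and .message -- mirroring the TaskInfo that the _poll_task
    # records above are printing.
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info  # e.g. task-3433596 completed in ~0.08s above
        if info.state == 'error':
            # Fault names that match a known class get a specific exception;
            # unmatched ones ('Fault InvalidArgument not matched.') fall back
            # to the generic VimFaultException seen in the traceback.
            raise VimFaultException(info.error.faults, info.error.message)
        time.sleep(poll_interval)  # the 'progress is 0%.' lines in between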
[ 1810.182513] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6735a42f-b09c-416a-a556-f62a2cb4762c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.184401] env[68673]: DEBUG nova.compute.claims [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1810.184570] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.184794] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.205849] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1810.256761] env[68673]: DEBUG oslo_vmware.rw_handles [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1810.317593] env[68673]: DEBUG oslo_vmware.rw_handles [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1810.317790] env[68673]: DEBUG oslo_vmware.rw_handles [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1810.414081] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfa0752-007e-4b9c-af5c-afc0bd89b417 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.421241] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2663be-bde5-4962-9dc9-f13a6ec13764 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.450186] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69200ac3-83eb-447d-a315-2e61730747da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.456647] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ae6fb9-3e07-4e34-9fd7-c3c0f7793c82 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.469409] env[68673]: DEBUG nova.compute.provider_tree [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1810.477490] env[68673]: DEBUG nova.scheduler.client.report [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1810.491762] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.492342] env[68673]: ERROR nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1810.492342] env[68673]: Faults: ['InvalidArgument'] [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Traceback (most recent call last): [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1810.492342] 
env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self.driver.spawn(context, instance, image_meta, [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self._fetch_image_if_missing(context, vi) [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] image_cache(vi, tmp_image_ds_loc) [ 1810.492342] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] vm_util.copy_virtual_disk( [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] session._wait_for_task(vmdk_copy_task) [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return self.wait_for_task(task_ref) [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return evt.wait() [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] result = hub.switch() [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] return self.greenlet.switch() [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1810.492689] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] self.f(*self.args, **self.kw) [ 1810.493163] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1810.493163] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] raise exceptions.translate_fault(task_info.error) [ 1810.493163] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1810.493163] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Faults: ['InvalidArgument'] [ 1810.493163] env[68673]: ERROR nova.compute.manager [instance: bb290679-267b-4dc2-8337-896d5208c6cd] [ 1810.493163] env[68673]: DEBUG nova.compute.utils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1810.494484] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Build of instance bb290679-267b-4dc2-8337-896d5208c6cd was re-scheduled: A specified parameter was not correct: fileType [ 1810.494484] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1810.494851] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1810.495041] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1810.495217] env[68673]: DEBUG nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1810.495382] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1810.846964] env[68673]: DEBUG nova.network.neutron [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.856823] env[68673]: INFO nova.compute.manager [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Took 0.36 seconds to deallocate network for instance. [ 1810.945350] env[68673]: INFO nova.scheduler.client.report [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleted allocations for instance bb290679-267b-4dc2-8337-896d5208c6cd [ 1810.970786] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9bc46789-d6a0-42dc-9803-344a58b3936d tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.279s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.971701] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.971959] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.972252] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.973122] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.975760] env[68673]: INFO nova.compute.manager [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Terminating instance [ 1810.977841] env[68673]: DEBUG nova.compute.manager [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1810.978813] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1810.978813] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2228636-c29c-45c3-8d2d-2e26d483e3e3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.991024] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eab2b1-47ec-4655-b6f4-02d16c5cd02f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.001031] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1811.023027] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb290679-267b-4dc2-8337-896d5208c6cd could not be found. 
[ 1811.023247] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1811.023429] env[68673]: INFO nova.compute.manager [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1811.023688] env[68673]: DEBUG oslo.service.loopingcall [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.024197] env[68673]: DEBUG nova.compute.manager [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1811.024197] env[68673]: DEBUG nova.network.neutron [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1811.047209] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.047456] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.048993] env[68673]: INFO nova.compute.claims [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1811.058548] env[68673]: DEBUG nova.network.neutron [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.081021] env[68673]: INFO nova.compute.manager [-] [instance: bb290679-267b-4dc2-8337-896d5208c6cd] Took 0.06 seconds to deallocate network for instance. 
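The scheduler report-client records above print the provider inventory that claims are made against. Assuming the standard placement capacity formula, capacity = (total - reserved) * allocation_ratio, the effective schedulable capacity can be recomputed directly from the logged inventory data:

# Recompute effective capacity from the inventory dict logged above,
# assuming capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0        (48 vCPUs oversubscribed 4x)
# MEMORY_MB 196078.0
# DISK_GB 400.0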
[ 1811.161667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-c42e463c-5bbe-4fc3-be61-a03859fcbe6e tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "bb290679-267b-4dc2-8337-896d5208c6cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.227479] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d5d80a-f11d-4650-9345-6b61c93e2979 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.234534] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc41b4e9-ee88-4372-bc97-f9eee8533f29 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.265851] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95abaea0-0531-4e61-9fd7-85f98a5086ad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.272778] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaa0743-b4b0-4915-bad5-52b0c7d5608e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.285770] env[68673]: DEBUG nova.compute.provider_tree [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.295242] env[68673]: DEBUG nova.scheduler.client.report [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1811.310767] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.311242] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1811.343854] env[68673]: DEBUG nova.compute.utils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1811.345338] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1811.345439] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1811.354013] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1811.398146] env[68673]: DEBUG nova.policy [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '920b26b61d494f269c05579970187435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70890a0ed0804a319a83a84711371cee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1811.413232] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1811.436871] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1811.437129] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1811.437296] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1811.437523] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1811.437704] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1811.437862] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1811.438088] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1811.438253] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1811.438423] 
env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1811.438587] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1811.438756] env[68673]: DEBUG nova.virt.hardware [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1811.439621] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398f8ad6-6dde-427f-a8aa-a2f1fd88ca64 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.446998] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a000ff70-9a17-493d-aa38-ffbb99059894 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.777737] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Successfully created port: 94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1812.494480] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Successfully updated port: 94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1812.508811] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.508960] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.509130] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1812.549056] env[68673]: DEBUG 
nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1812.713564] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Updating instance_info_cache with network_info: [{"id": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "address": "fa:16:3e:df:36:9e", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94dc0111-1a", "ovs_interfaceid": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.725970] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Releasing lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.726256] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance network_info: |[{"id": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "address": "fa:16:3e:df:36:9e", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94dc0111-1a", 
"ovs_interfaceid": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1812.726646] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:36:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94dc0111-1a06-4e91-b150-5b4924c5c1e9', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1812.734229] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Creating folder: Project (70890a0ed0804a319a83a84711371cee). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1812.734728] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-828ee2c4-a673-4e5d-83eb-450380283981 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.746133] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Created folder: Project (70890a0ed0804a319a83a84711371cee) in parent group-v685311. [ 1812.746295] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Creating folder: Instances. Parent ref: group-v685410. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1812.746515] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14fc521b-3498-4a9e-abb4-4eca4ae44809 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.754978] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Created folder: Instances in parent group-v685410. [ 1812.755222] env[68673]: DEBUG oslo.service.loopingcall [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1812.755393] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1812.755674] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be3589e8-f1af-4886-bcf8-e64c90bf941a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.774398] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1812.774398] env[68673]: value = "task-3433599" [ 1812.774398] env[68673]: _type = "Task" [ 1812.774398] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.782617] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433599, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.902314] env[68673]: DEBUG nova.compute.manager [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Received event network-vif-plugged-94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1812.902394] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Acquiring lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.902596] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.902767] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.902933] env[68673]: DEBUG nova.compute.manager [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] No waiting events found dispatching network-vif-plugged-94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1812.903251] env[68673]: WARNING nova.compute.manager [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Received unexpected event network-vif-plugged-94dc0111-1a06-4e91-b150-5b4924c5c1e9 for instance with vm_state building and task_state spawning. 
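The req-bf37c2c6 records show neutron delivering network-vif-plugged and network-changed events for port 94dc0111-1a06-4e91-b150-5b4924c5c1e9 to nova-compute, which matches them against events the instance is waiting on; nothing was registered here, hence the "Received unexpected event" warning. A toy model of that dispatch, with hypothetical names (the real logic lives in nova.compute.manager.InstanceEvents):

import logging

LOG = logging.getLogger(__name__)

class InstanceEvents:
    """Toy model with hypothetical names; not Nova's implementation."""

    def __init__(self):
        self._events = {}  # {instance_uuid: {event_tag: waiter}}

    def pop_instance_event(self, instance_uuid, tag):
        waiter = self._events.get(instance_uuid, {}).pop(tag, None)
        if waiter is None:
            # Matches the 'No waiting events found dispatching ...' /
            # 'Received unexpected event ...' pair in the records above.
            LOG.warning('Received unexpected event %s for instance %s',
                        tag, instance_uuid)
        return waiter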
[ 1812.903251] env[68673]: DEBUG nova.compute.manager [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Received event network-changed-94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1812.903403] env[68673]: DEBUG nova.compute.manager [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Refreshing instance network info cache due to event network-changed-94dc0111-1a06-4e91-b150-5b4924c5c1e9. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1812.903577] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Acquiring lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.903741] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Acquired lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.903849] env[68673]: DEBUG nova.network.neutron [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Refreshing network info cache for port 94dc0111-1a06-4e91-b150-5b4924c5c1e9 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1813.270556] env[68673]: DEBUG nova.network.neutron [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Updated VIF entry in instance network info cache for port 94dc0111-1a06-4e91-b150-5b4924c5c1e9. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1813.270926] env[68673]: DEBUG nova.network.neutron [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Updating instance_info_cache with network_info: [{"id": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "address": "fa:16:3e:df:36:9e", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94dc0111-1a", "ovs_interfaceid": "94dc0111-1a06-4e91-b150-5b4924c5c1e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.280146] env[68673]: DEBUG oslo_concurrency.lockutils [req-bf37c2c6-6a00-4e29-8465-91a6d82b36bb req-95681d45-fa24-4c42-a8af-fe92a168a817 service nova] Releasing lock "refresh_cache-db978196-4b5b-4d15-84c6-5e1f355d0e75" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.284759] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433599, 'name': CreateVM_Task, 'duration_secs': 0.284975} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.284759] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1813.284759] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.284759] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.285109] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1813.285147] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb5c5794-6e79-46e1-a6be-e5b2a57787c6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.289424] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for the task: (returnval){ [ 1813.289424] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527622a8-08f6-db6d-b3b8-2b37df8c8fdf" [ 1813.289424] env[68673]: _type = "Task" [ 1813.289424] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.298097] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]527622a8-08f6-db6d-b3b8-2b37df8c8fdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.799755] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.800127] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1813.800266] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.936331] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "0dd8e45a-d77a-4c9b-a733-353fce754549" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.936645] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.079269] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.100818] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){ [ 1829.100818] env[68673]: value = "domain-c8" [ 1829.100818] env[68673]: _type = "ClusterComputeResource" [ 1829.100818] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1829.102181] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b85382-8285-40c5-895b-d9df370414ca {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.119052] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 10 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1829.119192] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None 
None] Triggering sync for uuid 76697868-c920-43d0-ab11-fcdff2e38dc1 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.119395] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid f1f37193-2ef8-43aa-bde4-98399ce3f955 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.119720] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 218a1129-966d-4512-8b4b-222d31ceb106 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.119720] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid c6f7698c-3a1d-47e7-aeac-fd0e50376a39 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.119857] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 601dfed1-fb7b-413a-836d-7fda61314c73 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.119988] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 2a8badf2-c080-46dc-be89-4c73bb88cc01 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.120157] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 3352e87c-38dd-4bfa-937c-644abc30cf76 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.120341] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid d79f254c-7c00-4cf8-85ac-6db513533da3 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.120473] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 3e1b47d8-e829-416d-baca-b15e3d0d358d {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.120621] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid db978196-4b5b-4d15-84c6-5e1f355d0e75 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1829.120937] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "76697868-c920-43d0-ab11-fcdff2e38dc1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.121200] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.121411] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "218a1129-966d-4512-8b4b-222d31ceb106" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.121638] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.121834] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "601dfed1-fb7b-413a-836d-7fda61314c73" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.122088] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.122229] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "3352e87c-38dd-4bfa-937c-644abc30cf76" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.122422] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "d79f254c-7c00-4cf8-85ac-6db513533da3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.122620] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.122820] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.822211] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.822544] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.783366] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.783540] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1854.784916] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.785337] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1854.785337] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1854.806072] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806230] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806370] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806472] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806595] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806716] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806836] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.806955] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.807085] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.807205] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1854.807324] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1856.112827] env[68673]: WARNING oslo_vmware.rw_handles [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1856.112827] env[68673]: ERROR oslo_vmware.rw_handles [ 1856.113484] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1856.115342] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1856.115577] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 
tempest-ServerAddressesTestJSON-320864406-project-member] Copying Virtual Disk [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/c6f07f8a-1446-4543-bd09-3b61a4e1cbbf/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1856.115852] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6f1e86d-e205-4455-b1ff-b692d512040c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.124056] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for the task: (returnval){ [ 1856.124056] env[68673]: value = "task-3433600" [ 1856.124056] env[68673]: _type = "Task" [ 1856.124056] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.131497] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Task: {'id': task-3433600, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.634217] env[68673]: DEBUG oslo_vmware.exceptions [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1856.634502] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.635071] env[68673]: ERROR nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1856.635071] env[68673]: Faults: ['InvalidArgument'] [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Traceback (most recent call last): [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] yield resources [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self.driver.spawn(context, instance, image_meta, [ 1856.635071] 
env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self._fetch_image_if_missing(context, vi) [ 1856.635071] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] image_cache(vi, tmp_image_ds_loc) [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] vm_util.copy_virtual_disk( [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] session._wait_for_task(vmdk_copy_task) [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return self.wait_for_task(task_ref) [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return evt.wait() [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] result = hub.switch() [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1856.635451] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return self.greenlet.switch() [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self.f(*self.args, **self.kw) [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1856.635908] env[68673]: ERROR nova.compute.manager 
[instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] raise exceptions.translate_fault(task_info.error) [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Faults: ['InvalidArgument'] [ 1856.635908] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] [ 1856.635908] env[68673]: INFO nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Terminating instance [ 1856.636892] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.637117] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1856.637351] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12648056-33dc-4d9d-b1e7-ca63d9a16549 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.639700] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1856.639892] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1856.640606] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d03a7a4-25cb-49c4-9304-115c62ee485d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.647309] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1856.647525] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c265207d-25ac-4589-8ae1-99eec727fdc5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.649650] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1856.649824] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1856.650811] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1165ce0b-5fb9-4252-b0ce-1a0606e12e9f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.655404] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){ [ 1856.655404] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5263728b-124f-31f1-fe45-b3e576c8bc55" [ 1856.655404] env[68673]: _type = "Task" [ 1856.655404] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.662161] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5263728b-124f-31f1-fe45-b3e576c8bc55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.782935] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.783206] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.795152] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.795374] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.795541] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.795696] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1856.796779] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93bb928-b583-4928-9c15-8550de76521b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.805089] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08b02de-5f94-4d7c-9546-3ba4d6a429ee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.818336] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5167aae-e179-4ba2-b77c-564a2f611d4a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.824194] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cd0d61-2739-4953-8206-cbfb90e8c3f2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.853353] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180877MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1856.853455] env[68673]: 
DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.853640] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.921655] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.921814] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.921956] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922075] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922197] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922312] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922455] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922557] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922670] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.922779] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1856.932813] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1856.942901] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1856.943121] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1856.943264] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1857.074213] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e3ae7b-2bd7-4dd3-992b-5a0cdca17a65 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.081774] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57bbfa9-d820-473f-bf34-3750a9170673 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.110824] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5b11ec-cb0c-4f18-95f5-b53e43d5e100 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.117473] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31871ef-f34e-4768-83f3-f52fdde40cf9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.129940] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.137854] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1857.152066] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1857.152249] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.299s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.165101] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1857.165349] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating directory with path [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1857.165826] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d50b5d43-03e5-47aa-8049-0365c09628af {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.177397] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Created directory with path [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1857.177591] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Fetch image to [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1857.177760] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1857.178504] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39b3703-2632-4965-805b-11a818689d4c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.185060] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e5df08-1263-45fb-bb21-61d109a3e486 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.194380] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f9e34f-908c-4fd8-9db2-50bb48594807 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.224759] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10239776-6fac-4f2e-955d-bba50eafbd7c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.229937] 
env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3b32a966-7f82-4af2-9885-df09ba692276 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.261778] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1857.267212] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1857.267416] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1857.267586] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Deleting the datastore file [datastore1] 76697868-c920-43d0-ab11-fcdff2e38dc1 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1857.267843] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aad9f664-f6d7-4104-ac59-9e4a9d422505 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.273961] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for the task: (returnval){ [ 1857.273961] env[68673]: value = "task-3433602" [ 1857.273961] env[68673]: _type = "Task" [ 1857.273961] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.281301] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Task: {'id': task-3433602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.395060] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1857.455412] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1857.455708] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1857.783961] env[68673]: DEBUG oslo_vmware.api [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Task: {'id': task-3433602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084004} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.784236] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.784402] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1857.785058] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1857.785058] env[68673]: INFO nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Took 1.14 seconds to destroy the instance on the hypervisor. 
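In the resource-tracker records just above and below, the node reports per-resource-class inventory (VCPU, MEMORY_MB, DISK_GB) to placement; the capacity placement schedules against is (total - reserved) * allocation_ratio. A quick check of that arithmetic against the figures logged here (illustrative only):

    # Inventory exactly as reported in the surrounding log records.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the ten instances tracked earlier, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement ("total allocated vcpus: 10"), consume only a small fraction of the schedulable capacity.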
[ 1857.786833] env[68673]: DEBUG nova.compute.claims [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1857.787012] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.787226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.971524] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce115276-0ffd-45a7-8232-1b140224d90e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.978784] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcbe7d9-3ad9-40c6-bf9b-48d32c4ea265 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.009373] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bd5a82-3f78-4cc2-98a5-07583f29bd2b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.016102] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de324c8f-1019-42a0-ae7f-7bad8a161aa0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.028637] env[68673]: DEBUG nova.compute.provider_tree [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1858.037006] env[68673]: DEBUG nova.scheduler.client.report [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1858.051728] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac 
tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.052275] env[68673]: ERROR nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1858.052275] env[68673]: Faults: ['InvalidArgument'] [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Traceback (most recent call last): [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self.driver.spawn(context, instance, image_meta, [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self._fetch_image_if_missing(context, vi) [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] image_cache(vi, tmp_image_ds_loc) [ 1858.052275] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] vm_util.copy_virtual_disk( [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] session._wait_for_task(vmdk_copy_task) [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return self.wait_for_task(task_ref) [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return evt.wait() [ 1858.052713] 
env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] result = hub.switch() [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] return self.greenlet.switch() [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1858.052713] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] self.f(*self.args, **self.kw) [ 1858.053164] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1858.053164] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] raise exceptions.translate_fault(task_info.error) [ 1858.053164] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1858.053164] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Faults: ['InvalidArgument'] [ 1858.053164] env[68673]: ERROR nova.compute.manager [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] [ 1858.053164] env[68673]: DEBUG nova.compute.utils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1858.054412] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Build of instance 76697868-c920-43d0-ab11-fcdff2e38dc1 was re-scheduled: A specified parameter was not correct: fileType [ 1858.054412] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1858.054785] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1858.054960] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1858.055145] env[68673]: DEBUG nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1858.055309] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1858.153546] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.153819] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.641645] env[68673]: DEBUG nova.network.neutron [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.653826] env[68673]: INFO nova.compute.manager [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Took 0.60 seconds to deallocate network for instance. 
[ 1858.747635] env[68673]: INFO nova.scheduler.client.report [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Deleted allocations for instance 76697868-c920-43d0-ab11-fcdff2e38dc1 [ 1858.771807] env[68673]: DEBUG oslo_concurrency.lockutils [None req-27885285-7ad7-44ed-8652-d6a0ccbe24ac tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.170s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.772998] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.697s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.773237] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Acquiring lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.773438] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.773604] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.775630] env[68673]: INFO nova.compute.manager [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Terminating instance [ 1858.777657] env[68673]: DEBUG nova.compute.manager [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1858.778026] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1858.778654] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e48f490d-af25-4542-983f-8d0e91da416f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.788671] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d45ce6-d943-42c5-9b6e-2663d0e22521 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.799452] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1858.821117] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 76697868-c920-43d0-ab11-fcdff2e38dc1 could not be found. [ 1858.821668] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1858.821668] env[68673]: INFO nova.compute.manager [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1858.822556] env[68673]: DEBUG oslo.service.loopingcall [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.822556] env[68673]: DEBUG nova.compute.manager [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1858.822556] env[68673]: DEBUG nova.network.neutron [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1858.845799] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.846142] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.847556] env[68673]: INFO nova.compute.claims [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1858.850299] env[68673]: DEBUG nova.network.neutron [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.857943] env[68673]: INFO nova.compute.manager [-] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] Took 0.04 seconds to deallocate network for instance. [ 1858.961827] env[68673]: DEBUG oslo_concurrency.lockutils [None req-1362bc68-98a9-46ab-ba86-9fcd4bc98e54 tempest-ServerAddressesTestJSON-320864406 tempest-ServerAddressesTestJSON-320864406-project-member] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.963501] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 29.842s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.963501] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 76697868-c920-43d0-ab11-fcdff2e38dc1] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1858.963633] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "76697868-c920-43d0-ab11-fcdff2e38dc1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.044219] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b057ea98-540f-423c-891a-a8b21cfd81df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.052610] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be867f9e-203d-4a66-bf9c-7dd5de97d5ed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.082047] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec5fa67-1cc1-4477-99e9-842e4bcd0c97 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.088802] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d758da-daaf-4da9-969b-05b5b0b9ba3e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.101596] env[68673]: DEBUG nova.compute.provider_tree [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.110014] env[68673]: DEBUG nova.scheduler.client.report [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1859.124246] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.124694] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1859.156019] env[68673]: DEBUG nova.compute.utils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1859.157159] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1859.157332] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1859.166593] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1859.218201] env[68673]: DEBUG nova.policy [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10987b1d09db49219525f9846638c55e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaf47d7aa4b64fa58f8c1b3f1fcc02df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1859.227069] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1859.251721] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1859.252041] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1859.252214] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1859.252400] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1859.252564] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1859.252718] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1859.252921] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1859.253090] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1859.253260] env[68673]: DEBUG nova.virt.hardware [None 
req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1859.253426] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1859.253593] env[68673]: DEBUG nova.virt.hardware [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1859.254489] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1a6fe6-d6d8-4691-a678-f1b8e6057827 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.262466] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8441ce-7335-4333-a83a-327ffee3d59b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.508077] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Successfully created port: d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1860.259645] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Successfully updated port: d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1860.282164] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.282640] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.282640] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1860.345103] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 
tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1860.499543] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Updating instance_info_cache with network_info: [{"id": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "address": "fa:16:3e:12:07:08", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15a1ff5-8c", "ovs_interfaceid": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.513057] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.513335] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance network_info: |[{"id": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "address": "fa:16:3e:12:07:08", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15a1ff5-8c", "ovs_interfaceid": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1860.513731] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:07:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd15a1ff5-8c2f-4052-ad3c-edaf6c6b5567', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1860.521326] env[68673]: DEBUG oslo.service.loopingcall [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1860.521819] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1860.522057] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de0fbd09-0ee4-4502-9e55-4cb239812b5a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.541615] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1860.541615] env[68673]: value = "task-3433603" [ 1860.541615] env[68673]: _type = "Task" [ 1860.541615] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.548920] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.678788] env[68673]: DEBUG nova.compute.manager [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Received event network-vif-plugged-d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1860.679035] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Acquiring lock "16320084-1a09-493a-8ff2-389da64b92a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.679235] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Lock "16320084-1a09-493a-8ff2-389da64b92a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.679403] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Lock "16320084-1a09-493a-8ff2-389da64b92a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.679568] env[68673]: DEBUG nova.compute.manager [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] No waiting events found dispatching network-vif-plugged-d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1860.679781] env[68673]: WARNING nova.compute.manager [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Received unexpected event network-vif-plugged-d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 for instance with vm_state building and task_state spawning. [ 1860.679878] env[68673]: DEBUG nova.compute.manager [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Received event network-changed-d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1860.680035] env[68673]: DEBUG nova.compute.manager [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Refreshing instance network info cache due to event network-changed-d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1860.680223] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Acquiring lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.680358] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Acquired lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.680512] env[68673]: DEBUG nova.network.neutron [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Refreshing network info cache for port d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1860.974249] env[68673]: DEBUG nova.network.neutron [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Updated VIF entry in instance network info cache for port d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1860.974623] env[68673]: DEBUG nova.network.neutron [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Updating instance_info_cache with network_info: [{"id": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "address": "fa:16:3e:12:07:08", "network": {"id": "464729d2-e9d7-40d4-af9f-0bee51a32354", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1832341621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf47d7aa4b64fa58f8c1b3f1fcc02df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15a1ff5-8c", "ovs_interfaceid": "d15a1ff5-8c2f-4052-ad3c-edaf6c6b5567", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.983852] env[68673]: DEBUG oslo_concurrency.lockutils [req-4569e114-4578-4616-98ee-444ad389a426 req-d646b9da-5571-42a9-8f28-8997d6609072 service nova] Releasing lock "refresh_cache-16320084-1a09-493a-8ff2-389da64b92a2" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.051912] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.552528] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.054897] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.553967] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.783401] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.055051] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.555860] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.056053] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433603, 'name': CreateVM_Task, 'duration_secs': 3.25736} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.056053] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1864.056701] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.056866] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.057215] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1864.057458] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41806885-7429-4918-805b-ca74a5630be2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.061704] env[68673]: DEBUG oslo_vmware.api [None 
req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 1864.061704] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52998e74-2f7e-116c-e760-4d1edc481b71" [ 1864.061704] env[68673]: _type = "Task" [ 1864.061704] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.068965] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52998e74-2f7e-116c-e760-4d1edc481b71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.572113] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.572404] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1864.572586] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.578836] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.618106] env[68673]: WARNING oslo_vmware.rw_handles [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1906.618106] env[68673]: ERROR oslo_vmware.rw_handles [ 1906.618815] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1906.620851] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1906.621141] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Copying Virtual Disk [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/6229079f-adab-4326-a4f5-ac502ee28303/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1906.621462] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95f12cad-f390-4970-9789-be4f1222f01c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.629819] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){ [ 1906.629819] env[68673]: value = "task-3433604" [ 1906.629819] env[68673]: _type = "Task" [ 1906.629819] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.637370] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.140148] env[68673]: DEBUG oslo_vmware.exceptions [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Fault InvalidArgument not matched. 
[ 1907.140148] env[68673]: DEBUG oslo_vmware.exceptions [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1907.140386] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1907.140928] env[68673]: ERROR nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1907.140928] env[68673]: Faults: ['InvalidArgument']
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Traceback (most recent call last):
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] yield resources
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self.driver.spawn(context, instance, image_meta,
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self._fetch_image_if_missing(context, vi)
[ 1907.140928] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] image_cache(vi, tmp_image_ds_loc)
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] vm_util.copy_virtual_disk(
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] session._wait_for_task(vmdk_copy_task)
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return self.wait_for_task(task_ref)
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return evt.wait()
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] result = hub.switch()
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1907.141349] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return self.greenlet.switch()
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self.f(*self.args, **self.kw)
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] raise exceptions.translate_fault(task_info.error)
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Faults: ['InvalidArgument']
[ 1907.141795] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955]
[ 1907.141795] env[68673]: INFO nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Terminating instance
[ 1907.142768] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1907.142988] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1907.143229] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81be1b72-0a18-4dc1-82a6-a444d946b3d4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.145286] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1907.145472] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1907.146176] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f759dd01-1e02-4f4b-aff8-cf746c2ebe9c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.153718] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1907.153951] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f941302b-159b-4da3-b07a-44d9c42bc08e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.156082] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1907.156256] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1907.157200] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-465df591-c9e6-41c8-8da8-40dbf948397b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.161916] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for the task: (returnval){
[ 1907.161916] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52784621-7132-e993-cea2-3e361ba368fe"
[ 1907.161916] env[68673]: _type = "Task"
[ 1907.161916] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1907.170067] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52784621-7132-e993-cea2-3e361ba368fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1907.226123] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1907.226354] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1907.226534] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleting the datastore file [datastore1] f1f37193-2ef8-43aa-bde4-98399ce3f955 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1907.228035] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4226b6a3-361e-4bb3-9738-0b498c4dbd4c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.232301] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for the task: (returnval){
[ 1907.232301] env[68673]: value = "task-3433606"
[ 1907.232301] env[68673]: _type = "Task"
[ 1907.232301] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1907.240070] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433606, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.672076] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1907.672439] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Creating directory with path [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1907.672589] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-708a4730-9930-4405-a246-7bb3ce6c1d03 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.683683] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Created directory with path [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1907.683889] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Fetch image to [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1907.684050] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1907.684761] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7558bed7-b044-4603-bb3e-d4051f23b577 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.691402] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96839607-8fe8-4a16-8e5a-37a8ec477d12 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.700236] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76663473-afd9-4201-8f45-93bbf79aaab2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.731057] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-397fb5b4-d352-4d0b-8a5f-3067d5d3861d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.741244] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e10024ea-ef26-4c15-ae21-5de91259a730 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.742876] env[68673]: DEBUG oslo_vmware.api [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Task: {'id': task-3433606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064718} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.743128] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1907.743307] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1907.743479] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1907.743677] env[68673]: INFO nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Took 0.60 seconds to destroy the instance on the hypervisor. 
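Every lock line in this log follows the same oslo_concurrency.lockutils instrumentation, visible again in the "compute_resources" lines just below: announce the acquire attempt, report how long the acquire waited, and report how long the lock was held on release. A plain-Python sketch of that pattern around an assumed threading.Lock; the real implementation lives in lockutils.py.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, by):
        # 'Acquiring lock "X" by "Y"' ... 'acquired ... waited Ns' ... 'held Ns'
        print('Acquiring lock "%s" by "%s"' % (name, by))
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, by, time.monotonic() - t0))
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, by, time.monotonic() - held_from))

For example, with timed_lock(threading.Lock(), "compute_resources", "instance_claim") would reproduce the bracketing seen around the resource tracker calls in this log.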
[ 1907.745794] env[68673]: DEBUG nova.compute.claims [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1907.746008] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.746239] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.763995] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1907.843133] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1907.905090] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1907.905284] env[68673]: DEBUG oslo_vmware.rw_handles [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1907.978420] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b926a714-f3c8-472d-9291-a9835681c19b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1907.986040] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca985df-860c-4847-a4b8-66561765dbb8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1908.015990] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5fca68-4a41-4c10-9a43-0ef687cf8f65 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1908.021919] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f1d478-6f75-4b95-9789-8caddd3f14bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1908.034719] env[68673]: DEBUG nova.compute.provider_tree [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1908.044437] env[68673]: DEBUG nova.scheduler.client.report [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1908.058104] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1908.058643] env[68673]: ERROR nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1908.058643] env[68673]: Faults: ['InvalidArgument']
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Traceback (most recent call last):
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self.driver.spawn(context, instance, image_meta,
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self._fetch_image_if_missing(context, vi)
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] image_cache(vi, tmp_image_ds_loc)
[ 1908.058643] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] vm_util.copy_virtual_disk(
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] session._wait_for_task(vmdk_copy_task)
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return self.wait_for_task(task_ref)
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return evt.wait()
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] result = hub.switch()
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] return self.greenlet.switch()
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1908.059030] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] self.f(*self.args, **self.kw)
[ 1908.059427] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1908.059427] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] raise exceptions.translate_fault(task_info.error)
[ 1908.059427] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1908.059427] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Faults: ['InvalidArgument']
[ 1908.059427] env[68673]: ERROR nova.compute.manager [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955]
[ 1908.059427] env[68673]: DEBUG nova.compute.utils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1908.060694] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Build of instance f1f37193-2ef8-43aa-bde4-98399ce3f955 was re-scheduled: A specified parameter was not correct: fileType
[ 1908.060694] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1908.061087] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1908.061266] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1908.061481] env[68673]: DEBUG nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1908.061604] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1908.366527] env[68673]: DEBUG nova.network.neutron [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.376746] env[68673]: INFO nova.compute.manager [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Took 0.31 seconds to deallocate network for instance. [ 1908.462216] env[68673]: INFO nova.scheduler.client.report [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Deleted allocations for instance f1f37193-2ef8-43aa-bde4-98399ce3f955 [ 1908.485456] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ce8c65c3-0e3f-413e-a898-a651b6691af8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.170s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.486693] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.574s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.486922] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Acquiring lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.487142] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.487313] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.489586] env[68673]: INFO nova.compute.manager [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Terminating instance [ 1908.492127] env[68673]: DEBUG nova.compute.manager [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1908.492227] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1908.492830] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4f69399-0a4f-4149-a228-2ba974534934 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.504070] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab53e60b-8b5b-4e14-8d63-f19cb0fe24e7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.514881] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1908.535742] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1f37193-2ef8-43aa-bde4-98399ce3f955 could not be found. 
[ 1908.535936] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1908.536132] env[68673]: INFO nova.compute.manager [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1908.536375] env[68673]: DEBUG oslo.service.loopingcall [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1908.536593] env[68673]: DEBUG nova.compute.manager [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1908.536688] env[68673]: DEBUG nova.network.neutron [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1908.559899] env[68673]: DEBUG nova.network.neutron [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.562922] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.563192] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.564622] env[68673]: INFO nova.compute.claims [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1908.568259] env[68673]: INFO nova.compute.manager [-] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] Took 0.03 seconds to deallocate network for instance. 
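The claim lines above bracket the new build: under the "compute_resources" lock the resource tracker records the instance's claim against the node, and if the build later fails the claim is aborted so the resources are returned, which is what happened for f1f37193 earlier ("Aborting claim" followed by abort_instance_claim). A minimal sketch of that claim/abort shape; the tracker object with .lock and .usage attributes is an assumption, not Nova's ResourceTracker.

    from contextlib import contextmanager

    @contextmanager
    def instance_claim(tracker, requested):
        with tracker.lock:                  # the "compute_resources" lock
            tracker.usage += requested      # "Claim successful on node ..."
        try:
            yield
        except Exception:
            with tracker.lock:              # abort_instance_claim path
                tracker.usage -= requested  # "Aborting claim"
            raise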
[ 1908.646407] env[68673]: DEBUG oslo_concurrency.lockutils [None req-0a9cb401-e2db-4b6a-a70d-a6d6dcc0f9c8 tempest-AttachVolumeShelveTestJSON-1799110509 tempest-AttachVolumeShelveTestJSON-1799110509-project-member] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.647564] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 79.526s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.647758] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f1f37193-2ef8-43aa-bde4-98399ce3f955] During sync_power_state the instance has a pending task (deleting). Skip. [ 1908.647936] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "f1f37193-2ef8-43aa-bde4-98399ce3f955" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.723339] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e753f35f-048e-4166-a2ed-90066e186759 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.731054] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebb1f6d-11d4-46cb-acc8-5a3fde79df85 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.761399] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63a7ec3-61b2-48a1-99b2-8231717345e3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.768095] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8de80f-bb6a-4c42-9eb6-02cebbf6b7ad {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.780668] env[68673]: DEBUG nova.compute.provider_tree [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1908.790251] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1908.804272] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.804722] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1908.835472] env[68673]: DEBUG nova.compute.utils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1908.836973] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1908.837181] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1908.846561] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1908.907532] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1908.931789] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1908.932047] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1908.932215] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1908.932401] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1908.932551] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1908.932701] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1908.933153] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1908.933153] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1908.933226] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Got 1 
possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1908.933390] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1908.933559] env[68673]: DEBUG nova.virt.hardware [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1908.934456] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4ab9c7-7d3c-4e72-aad1-fad1d4b7b056 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.938325] env[68673]: DEBUG nova.policy [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b0ed7aa0f5c413d9cc32a8bbf4724df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92e8efd351c449e8815c0ec3b6070d20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1908.945355] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6b8b4c-dea2-422c-8c20-4ee697ac7d29 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.308153] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Successfully created port: 42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1909.967330] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Successfully updated port: 42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1909.980718] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.980858] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.981013] env[68673]: 
DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1910.022197] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1910.255064] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Updating instance_info_cache with network_info: [{"id": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "address": "fa:16:3e:ff:5a:10", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42a00151-8b", "ovs_interfaceid": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.267870] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.268184] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance network_info: |[{"id": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "address": "fa:16:3e:ff:5a:10", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42a00151-8b", "ovs_interfaceid": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1910.268588] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:5a:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42a00151-8b1b-487c-8cab-a5a62ee69bee', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1910.276383] env[68673]: DEBUG oslo.service.loopingcall [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.276901] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1910.277120] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c17a269c-b154-47ba-8038-ce6fde50bf88 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.297712] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1910.297712] env[68673]: value = "task-3433607" [ 1910.297712] env[68673]: _type = "Task" [ 1910.297712] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.305304] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433607, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.423499] env[68673]: DEBUG nova.compute.manager [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Received event network-vif-plugged-42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1910.423790] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Acquiring lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.424105] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.424290] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.424462] env[68673]: DEBUG nova.compute.manager [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] No waiting events found dispatching network-vif-plugged-42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1910.424626] env[68673]: WARNING nova.compute.manager [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Received unexpected event network-vif-plugged-42a00151-8b1b-487c-8cab-a5a62ee69bee for instance with vm_state building and task_state spawning. [ 1910.424973] env[68673]: DEBUG nova.compute.manager [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Received event network-changed-42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1910.425177] env[68673]: DEBUG nova.compute.manager [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Refreshing instance network info cache due to event network-changed-42a00151-8b1b-487c-8cab-a5a62ee69bee. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1910.425373] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Acquiring lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.425511] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Acquired lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.425664] env[68673]: DEBUG nova.network.neutron [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Refreshing network info cache for port 42a00151-8b1b-487c-8cab-a5a62ee69bee {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1910.712410] env[68673]: DEBUG nova.network.neutron [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Updated VIF entry in instance network info cache for port 42a00151-8b1b-487c-8cab-a5a62ee69bee. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1910.712756] env[68673]: DEBUG nova.network.neutron [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Updating instance_info_cache with network_info: [{"id": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "address": "fa:16:3e:ff:5a:10", "network": {"id": "a1b26535-ff90-43bc-91b1-56de05ead3c7", "bridge": "br-int", "label": "tempest-ImagesTestJSON-310066573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e8efd351c449e8815c0ec3b6070d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42a00151-8b", "ovs_interfaceid": "42a00151-8b1b-487c-8cab-a5a62ee69bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.721885] env[68673]: DEBUG oslo_concurrency.lockutils [req-abe28f4c-73d1-42f6-af31-80e76485863c req-2881b29c-50d0-4548-8d21-19f1b7122170 service nova] Releasing lock "refresh_cache-0dd8e45a-d77a-4c9b-a733-353fce754549" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.807597] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433607, 'name': CreateVM_Task, 'duration_secs': 0.287388} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.807759] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1910.814583] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.814793] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.815138] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1910.815377] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c913af50-1495-48a7-ac78-d1001966d011 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.819594] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 1910.819594] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52cf1ee9-47ca-0c35-8a40-c7274644849e" [ 1910.819594] env[68673]: _type = "Task" [ 1910.819594] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.826832] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52cf1ee9-47ca-0c35-8a40-c7274644849e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.330221] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.330579] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1911.330759] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.784257] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1912.779132] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1913.783889] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1913.784196] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1914.783892] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.784182] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1914.784221] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1914.807026] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807026] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807026] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807365] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807365] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807365] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807483] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807587] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807733] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807814] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1914.807927] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1916.803531] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.784057] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.784057] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.784368] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.796055] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.796269] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.796447] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.796598] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1917.797689] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a2b7a7-a853-4873-8387-de818d15df7d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.806080] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef829e9-3caa-4029-9b8c-5ca6a60ec098 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.819505] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2950217-06f8-42c2-85a2-fe00d6e6e0da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.825442] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63bfba5-c382-4e4a-aa5c-513c472fd76c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.855402] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180866MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1917.855542] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.855726] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.924080] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 218a1129-966d-4512-8b4b-222d31ceb106 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924261] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924395] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924516] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924635] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924753] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924867] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.924983] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.925115] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.925229] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.925415] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1917.925548] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1918.039173] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45093da6-0517-4db1-b491-4d9d4d7d0205 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.046564] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b716596-e338-40d9-9f2f-bd09ad0f87b9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.075280] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83fc626-cd41-421c-bd4c-d27f5d979d29 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.081815] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd5ddf2-b81e-4522-a01b-0d7de9b2b36a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.095305] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.103281] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1918.116356] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1918.116356] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.261s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.116686] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1923.783615] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1937.914479] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.914806] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.102927] env[68673]: WARNING oslo_vmware.rw_handles [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1957.102927] env[68673]: ERROR oslo_vmware.rw_handles [ 1957.103694] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1957.105466] env[68673]: DEBUG nova.virt.vmwareapi.vmops 
[None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1957.105760] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Copying Virtual Disk [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/aca48934-e550-41c4-ba64-57d2ebc17f1d/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1957.106072] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdc43b24-2000-4864-a2bf-eda7b30ac4e0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.114110] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for the task: (returnval){ [ 1957.114110] env[68673]: value = "task-3433608" [ 1957.114110] env[68673]: _type = "Task" [ 1957.114110] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.122123] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Task: {'id': task-3433608, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.625361] env[68673]: DEBUG oslo_vmware.exceptions [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1957.625715] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.626281] env[68673]: ERROR nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1957.626281] env[68673]: Faults: ['InvalidArgument'] [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Traceback (most recent call last): [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] yield resources [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self.driver.spawn(context, instance, image_meta, [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self._fetch_image_if_missing(context, vi) [ 1957.626281] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] image_cache(vi, tmp_image_ds_loc) [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] vm_util.copy_virtual_disk( [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] session._wait_for_task(vmdk_copy_task) [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return self.wait_for_task(task_ref) [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return evt.wait() [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] result = hub.switch() [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1957.626714] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return self.greenlet.switch() [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self.f(*self.args, **self.kw) [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] raise exceptions.translate_fault(task_info.error) [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Faults: ['InvalidArgument'] [ 1957.627183] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] [ 1957.627183] env[68673]: INFO nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Terminating instance [ 1957.628297] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.628508] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1957.628753] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e13aa47-44f9-4916-95c3-43339b68c449 {{(pid=68673) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.632037] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1957.632037] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1957.632548] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b66d18-1648-4e1e-8479-30b92e692c37 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.639788] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1957.640071] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3cff4e9-185e-4eb3-b0a7-2e76a0cbbce1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.642195] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1957.642432] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1957.643393] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca788134-2641-4fa4-a1c5-169eeca4c2fd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.648068] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for the task: (returnval){ [ 1957.648068] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524119e7-c86d-4cc2-70f4-56e9b197c643" [ 1957.648068] env[68673]: _type = "Task" [ 1957.648068] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.655197] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524119e7-c86d-4cc2-70f4-56e9b197c643, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.708672] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1957.708887] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1957.709084] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Deleting the datastore file [datastore1] 218a1129-966d-4512-8b4b-222d31ceb106 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1957.709334] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-563599e7-f7f2-49aa-9fce-5dad135d65f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.714997] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for the task: (returnval){ [ 1957.714997] env[68673]: value = "task-3433610" [ 1957.714997] env[68673]: _type = "Task" [ 1957.714997] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.722219] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Task: {'id': task-3433610, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.159360] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1958.159643] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Creating directory with path [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1958.159851] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f18c81cb-954d-443f-848b-155b5f35edb5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.171408] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Created directory with path [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1958.171595] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Fetch image to [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1958.171764] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1958.172504] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7095f2e6-0357-4864-9ba3-651ecb0eae86 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.179985] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31c9e85-fe58-44f7-b7de-f8009603c3f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.195645] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98d987c-f95b-428c-bd45-e32cf2b24eec {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.230512] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf49312f-5ad5-4b9d-88d0-f3ded8cd7357 {{(pid=68673) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.237512] env[68673]: DEBUG oslo_vmware.api [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Task: {'id': task-3433610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079625} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.238980] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1958.239186] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1958.239360] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1958.239531] env[68673]: INFO nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Took 0.61 seconds to destroy the instance on the hypervisor. 
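[annotation] The paired "Task: {...} progress is 0%." / "completed successfully." lines above are emitted by oslo.vmware's task polling (wait_for_task driving _poll_task in oslo_vmware/api.py), which repeatedly reads the vCenter TaskInfo and, when a task ends in the error state, translates the server-side fault into a VimFaultException — here the "A specified parameter was not correct: fileType" / Faults: ['InvalidArgument'] error that aborted CopyVirtualDisk_Task and triggered the instance teardown. Below is a minimal, self-contained sketch of that poll-and-translate pattern; fetch_task_info and TaskFailed are hypothetical stand-ins (in oslo.vmware the state comes from PropertyCollector reads and the raised type is VimFaultException), not the library's actual implementation.

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in the 'error' state (plays the
    role of oslo_vmware.exceptions.VimFaultException in this sketch)."""
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']

def wait_for_task(fetch_task_info, task_id, interval=0.5):
    """Poll task_id until it completes, mimicking the DEBUG lines
    'Task: {...} progress is N%.' seen in the log above.

    fetch_task_info is a hypothetical callable returning a dict like
    {'state': 'running', 'progress': 40, 'error': None}; vSphere task
    states are 'queued', 'running', 'success', and 'error'.
    """
    while True:
        info = fetch_task_info(task_id)
        if info['state'] in ('queued', 'running'):
            print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        elif info['state'] == 'success':
            print(f"Task: {task_id} completed successfully.")
            return info
        else:  # 'error': translate the fault before re-raising
            err = info['error']
            raise TaskFailed(err['msg'], err.get('faults', []))
        time.sleep(interval)

# Usage sketch reproducing the CopyVirtualDisk_Task failure above with
# canned TaskInfo snapshots instead of a live vCenter:
if __name__ == '__main__':
    states = iter([
        {'state': 'running', 'progress': 0, 'error': None},
        {'state': 'error', 'error': {
            'msg': 'A specified parameter was not correct: fileType',
            'faults': ['InvalidArgument']}},
    ])
    try:
        wait_for_task(lambda _tid: next(states), 'task-3433608', interval=0)
    except TaskFailed as exc:
        print(f"Faults: {exc.fault_list}")
[end annotation]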
[ 1958.241307] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d65cf535-59f6-4153-b5f9-f982fe7ddc8e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.243171] env[68673]: DEBUG nova.compute.claims [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1958.243409] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.243552] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.265300] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1958.322252] env[68673]: DEBUG oslo_vmware.rw_handles [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1958.383621] env[68673]: DEBUG oslo_vmware.rw_handles [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1958.383805] env[68673]: DEBUG oslo_vmware.rw_handles [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1958.471790] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7204a104-8544-4ea1-8d9a-5db259086676 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.479349] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c493df-9431-4f2e-81e2-9cb201ada943 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.509502] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6cd84e-a877-41fa-8e98-4a57163db1d5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.516092] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d44125-e4aa-47e6-b8da-b67f201d357c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.528524] env[68673]: DEBUG nova.compute.provider_tree [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1958.537310] env[68673]: DEBUG nova.scheduler.client.report [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1958.550873] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.551377] env[68673]: ERROR nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1958.551377] env[68673]: Faults: ['InvalidArgument'] [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Traceback (most recent call last): [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1958.551377] env[68673]: 
ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self.driver.spawn(context, instance, image_meta, [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self._fetch_image_if_missing(context, vi) [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] image_cache(vi, tmp_image_ds_loc) [ 1958.551377] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] vm_util.copy_virtual_disk( [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] session._wait_for_task(vmdk_copy_task) [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return self.wait_for_task(task_ref) [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return evt.wait() [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] result = hub.switch() [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] return self.greenlet.switch() [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1958.551793] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] self.f(*self.args, **self.kw) [ 1958.552222] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1958.552222] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] raise exceptions.translate_fault(task_info.error) [ 1958.552222] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1958.552222] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Faults: ['InvalidArgument'] [ 1958.552222] env[68673]: ERROR nova.compute.manager [instance: 218a1129-966d-4512-8b4b-222d31ceb106] [ 1958.552222] env[68673]: DEBUG nova.compute.utils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1958.553335] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Build of instance 218a1129-966d-4512-8b4b-222d31ceb106 was re-scheduled: A specified parameter was not correct: fileType [ 1958.553335] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1958.553714] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1958.553908] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1958.554095] env[68673]: DEBUG nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1958.554260] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1958.914360] env[68673]: DEBUG nova.network.neutron [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.926240] env[68673]: INFO nova.compute.manager [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Took 0.37 seconds to deallocate network for instance. [ 1959.020336] env[68673]: INFO nova.scheduler.client.report [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Deleted allocations for instance 218a1129-966d-4512-8b4b-222d31ceb106 [ 1959.040504] env[68673]: DEBUG oslo_concurrency.lockutils [None req-ba497a9d-4a9c-4059-9c02-c0a1e38e2822 tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 626.966s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.041705] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.764s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.041981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Acquiring lock "218a1129-966d-4512-8b4b-222d31ceb106-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.042803] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.042803] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.044490] env[68673]: INFO nova.compute.manager [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Terminating instance [ 1959.046294] env[68673]: DEBUG nova.compute.manager [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1959.046499] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1959.046953] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-366eed60-ec04-44fe-bdc8-518f9da8b8f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.052883] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1959.059595] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52b3a09-990a-4a1d-8277-f7c0d559e667 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.089831] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 218a1129-966d-4512-8b4b-222d31ceb106 could not be found. [ 1959.089831] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1959.089831] env[68673]: INFO nova.compute.manager [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Took 0.04 seconds to destroy the instance on the hypervisor. 
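The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" triplets throughout this section (the compute_resources lock, the per-instance lock, the "<uuid>-events" lock) come from oslo.concurrency. A rough sketch of the pattern with hypothetical function and lock names; this is not Nova's actual code, only the API shape that produces those DEBUG lines.

    from oslo_concurrency import lockutils

    # The decorator's inner wrapper is what logs the Acquiring/acquired/
    # released lines, including the waited/held durations seen above.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(tracker, instance):
        # Runs with the in-process lock held; external=True would switch
        # to a file-based lock shared across processes.
        tracker.abort(instance)

    # Per-instance event locks follow the same "<uuid>-events" naming:
    def clear_events_for_instance(instance_uuid, events):
        with lockutils.lock('%s-events' % instance_uuid):
            pending = dict(events)
            events.clear()
            return pending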
[ 1959.089831] env[68673]: DEBUG oslo.service.loopingcall [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.092012] env[68673]: DEBUG nova.compute.manager [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1959.092155] env[68673]: DEBUG nova.network.neutron [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1959.105661] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.105965] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.107635] env[68673]: INFO nova.compute.claims [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1959.120167] env[68673]: DEBUG nova.network.neutron [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.128516] env[68673]: INFO nova.compute.manager [-] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] Took 0.04 seconds to deallocate network for instance. [ 1959.213420] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d1817623-838d-4564-85d7-cee3d3863fbc tempest-ServerMetadataTestJSON-1572474438 tempest-ServerMetadataTestJSON-1572474438-project-member] Lock "218a1129-966d-4512-8b4b-222d31ceb106" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.214298] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "218a1129-966d-4512-8b4b-222d31ceb106" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 130.093s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.214487] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 218a1129-966d-4512-8b4b-222d31ceb106] During sync_power_state the instance has a pending task (deleting). Skip. 
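The "oslo.service.loopingcall ... Waiting for function ... _deallocate_network_with_retries to return" entry above is the looping-call machinery from oslo.service. A self-contained sketch of that API shape using FixedIntervalLoopingCall; Nova's actual retry helper, interval and stop condition differ, and the made-up _poll below only illustrates how such a loop is started, stopped and waited on.

    from oslo_service import loopingcall

    state = {'tries': 0}

    def _poll():
        # Pretend the first two attempts find the work unfinished.
        state['tries'] += 1
        if state['tries'] >= 3:
            # Raising LoopingCallDone stops the loop; its retvalue
            # becomes the result of .wait() below.
            raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone
    print(result)                              # -> 'deallocated'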
[ 1959.214658] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "218a1129-966d-4512-8b4b-222d31ceb106" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.277682] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11eeaf2-c03c-43b6-94bc-d4ac0b03a9a8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.284945] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b89748d-51be-487f-9cd4-304cf2d28f24 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.314739] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ac7699-eb82-4fc0-a13f-dd66fa6ad923 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.321796] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cd864a-e337-4b1e-92ac-53db357c3ff4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.334907] env[68673]: DEBUG nova.compute.provider_tree [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.343695] env[68673]: DEBUG nova.scheduler.client.report [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1959.357306] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.357790] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1959.391587] env[68673]: DEBUG nova.compute.utils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1959.393113] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1959.393283] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1959.401370] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1959.469077] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1959.472523] env[68673]: DEBUG nova.policy [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a135381c6987442cad21b1f74d5a9e34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8ecabf65f524fb5bfcb60401c45db96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 1959.498013] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1959.498315] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1959.498476] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1959.498655] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1959.498801] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1959.498948] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1959.499182] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1959.499345] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1959.499513] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1959.499673] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1959.499843] env[68673]: DEBUG nova.virt.hardware [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1959.500712] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc3810f-75dd-4758-a6d9-cf6ded20ace2 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.508655] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4737d758-c3dd-4ac7-9943-dd3384c8fd6f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.826809] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Successfully created port: 9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1960.416173] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Successfully updated port: 9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1960.426795] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.426963] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.427133] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1960.484688] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1960.733371] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Updating instance_info_cache with network_info: [{"id": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "address": "fa:16:3e:24:7b:d3", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9936b488-8a", "ovs_interfaceid": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.748696] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.748696] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance network_info: |[{"id": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "address": "fa:16:3e:24:7b:d3", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9936b488-8a", "ovs_interfaceid": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1960.748931] env[68673]: 
DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:7b:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9936b488-8a1a-4cbd-ab6e-996828b941ea', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1960.756524] env[68673]: DEBUG oslo.service.loopingcall [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.757042] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1960.757267] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bae1102-fc26-4477-8a31-d55b935efd32 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.778375] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1960.778375] env[68673]: value = "task-3433611" [ 1960.778375] env[68673]: _type = "Task" [ 1960.778375] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.786404] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433611, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.843478] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "16320084-1a09-493a-8ff2-389da64b92a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.952481] env[68673]: DEBUG nova.compute.manager [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Received event network-vif-plugged-9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1960.952481] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Acquiring lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.952481] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.952481] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.952633] env[68673]: DEBUG nova.compute.manager [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] No waiting events found dispatching network-vif-plugged-9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1960.952834] env[68673]: WARNING nova.compute.manager [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Received unexpected event network-vif-plugged-9936b488-8a1a-4cbd-ab6e-996828b941ea for instance with vm_state building and task_state spawning. [ 1960.952949] env[68673]: DEBUG nova.compute.manager [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Received event network-changed-9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1960.953134] env[68673]: DEBUG nova.compute.manager [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Refreshing instance network info cache due to event network-changed-9936b488-8a1a-4cbd-ab6e-996828b941ea. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1960.953358] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Acquiring lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.953523] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Acquired lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.953726] env[68673]: DEBUG nova.network.neutron [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Refreshing network info cache for port 9936b488-8a1a-4cbd-ab6e-996828b941ea {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1961.288211] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433611, 'name': CreateVM_Task, 'duration_secs': 0.281509} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.288380] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1961.289018] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.289180] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.289486] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1961.289726] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de291e81-74a4-4491-88c7-634bfd484017 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.294466] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 1961.294466] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524487a9-3d76-dcda-6608-97241b1c8b0e" [ 1961.294466] env[68673]: _type = "Task" [ 1961.294466] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.302740] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524487a9-3d76-dcda-6608-97241b1c8b0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.389192] env[68673]: DEBUG nova.network.neutron [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Updated VIF entry in instance network info cache for port 9936b488-8a1a-4cbd-ab6e-996828b941ea. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1961.389541] env[68673]: DEBUG nova.network.neutron [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Updating instance_info_cache with network_info: [{"id": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "address": "fa:16:3e:24:7b:d3", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9936b488-8a", "ovs_interfaceid": "9936b488-8a1a-4cbd-ab6e-996828b941ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.398786] env[68673]: DEBUG oslo_concurrency.lockutils [req-3d4f95bb-75fd-4164-acfa-736fb4badef0 req-d544f3bb-eebd-4e37-9c32-a4d5623a24dc service nova] Releasing lock "refresh_cache-ec4370aa-2353-4f9c-82c7-5707e3337c94" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.807366] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.807736] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1961.807843] 
env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.981811] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "07f2fc85-14df-4702-bf49-67d8ce8e9526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.982171] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "07f2fc85-14df-4702-bf49-67d8ce8e9526" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.784632] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.779308] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.782907] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.783128] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1974.783289] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1974.804993] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805144] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805308] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805460] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805587] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805710] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805829] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.805946] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.806075] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.806194] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1974.806314] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1975.784704] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1975.784704] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1978.784114] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1978.784383] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.783618] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.783854] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.795386] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.795698] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.795748] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.795907] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1979.797189] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14730e7e-cdee-400a-84cc-954000c00d3d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.806411] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8ed31d-937f-457a-9163-879d66cbe558 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.820135] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db3650d-926d-4258-b7b6-f0d7df9d612d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.826269] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72da4343-d67f-4ceb-b277-07a137bff5fe {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.856157] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180889MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1979.856310] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.856499] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.926102] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.926465] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 601dfed1-fb7b-413a-836d-7fda61314c73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.926755] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927716] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927716] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927716] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927716] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927937] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927937] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.927937] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1979.955104] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 has been scheduled to this compute host; the scheduler has made an allocation against this compute node, but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1979.955410] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1979.955596] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1980.082198] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a886a9-ef8a-4d0c-8c09-86503de3ee79 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.089497] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d48b4e7-236d-4561-902a-10c68b844d6d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.119192] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fe3ede-40d3-4c1a-a7cc-4ca1b8508b91 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.125768] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54052617-aac8-47a0-9d74-536c2c44de3d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.138301] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1980.146973] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1980.160095] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1980.160274] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.304s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.160751] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.170154] env[68673]: WARNING oslo_vmware.rw_handles [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2006.170154] env[68673]: ERROR oslo_vmware.rw_handles [ 2006.170873] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2006.173760] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2006.174081] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Copying Virtual Disk [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/5ec6f101-da38-4313-b9f7-541032aaf52d/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2006.174482] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e057db5-6339-450e-a660-3f69b5a9df71 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.184197] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 
tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for the task: (returnval){ [ 2006.184197] env[68673]: value = "task-3433612" [ 2006.184197] env[68673]: _type = "Task" [ 2006.184197] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.192042] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Task: {'id': task-3433612, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.694182] env[68673]: DEBUG oslo_vmware.exceptions [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2006.694461] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.695010] env[68673]: ERROR nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2006.695010] env[68673]: Faults: ['InvalidArgument'] [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Traceback (most recent call last): [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] yield resources [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self.driver.spawn(context, instance, image_meta, [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self._fetch_image_if_missing(context, vi) [ 2006.695010] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] image_cache(vi, tmp_image_ds_loc) [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] vm_util.copy_virtual_disk( [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] session._wait_for_task(vmdk_copy_task) [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return self.wait_for_task(task_ref) [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return evt.wait() [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] result = hub.switch() [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2006.695437] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return self.greenlet.switch() [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self.f(*self.args, **self.kw) [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] raise exceptions.translate_fault(task_info.error) [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Faults: ['InvalidArgument'] [ 2006.695880] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] [ 2006.695880] env[68673]: INFO nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Terminating instance [ 2006.696887] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.697105] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2006.697333] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8ac16fc-aad0-4b1c-9d42-cd641667d431 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.699601] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2006.699789] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2006.700478] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d62d74-d230-42dd-8b4b-e1c1a4d2c7c5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.706527] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2006.706725] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b834809e-5c27-4d5a-8981-23770a5f758a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.708711] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2006.708900] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2006.709801] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4ca4901-a1de-41ff-8d24-6847e33fe62c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.714820] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2006.714820] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5279089e-7290-b6bd-ca44-151f7f039202" [ 2006.714820] env[68673]: _type = "Task" [ 2006.714820] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.726775] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5279089e-7290-b6bd-ca44-151f7f039202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.778269] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2006.778552] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2006.778769] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Deleting the datastore file [datastore1] 601dfed1-fb7b-413a-836d-7fda61314c73 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2006.779051] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78bd0bb6-cf66-4265-b7e1-cc2d2544c1b7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.784934] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for the task: (returnval){ [ 2006.784934] env[68673]: value = "task-3433614" [ 2006.784934] env[68673]: _type = "Task" [ 2006.784934] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.792352] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Task: {'id': task-3433614, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.225356] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2007.225756] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2007.225832] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a1538cc-032f-4ab0-b3c9-210c0f8ff238 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.238484] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2007.238659] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Fetch image to [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2007.238867] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2007.239603] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec93464-4f3b-456b-9957-4c4e040906bd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.245872] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfab616-4830-4d24-a997-dfa621c92548 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.254390] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2992b19-a8fb-4a51-b4f7-8033a36289d9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.284995] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e9b2d5-33e1-4d71-9b99-bfa919940831 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.295171] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-acef88c9-4a72-45f9-84eb-44ae8f2886ed {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.296760] env[68673]: DEBUG oslo_vmware.api [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Task: {'id': task-3433614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088347} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.296988] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2007.297180] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2007.297344] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2007.297520] env[68673]: INFO nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Took 0.60 seconds to destroy the instance on the hypervisor. 
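The preceding records compress the whole failure path: a CopyVirtualDisk_Task is created, polled ("progress is 0%"), fails with VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), and the half-built instance is then unregistered and its datastore files deleted. A minimal sketch of that task/fault pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session, a datacenter reference dc_ref, and datastore paths (illustrative only, not Nova's vm_util.copy_virtual_disk):

    from oslo_vmware import exceptions as vexc

    def copy_disk(session, dc_ref, source_path, dest_path):
        # The VirtualDiskManager lives on the session's service content.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=source_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_path)
        try:
            # wait_for_task() polls task_info in a loop (the 'progress is
            # 0%' lines above) and raises once task_info.error is set.
            session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # Fault names such as 'InvalidArgument' arrive in fault_list.
            print('copy failed: %s (faults: %s)' % (e, e.fault_list))
            raise

The raise visible in the traceback above (exceptions.translate_fault(task_info.error) in _poll_task) is the point where the vSphere fault becomes the typed Python exception caught here.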
[ 2007.299618] env[68673]: DEBUG nova.compute.claims [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2007.299804] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.300031] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.319457] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2007.368983] env[68673]: DEBUG oslo_vmware.rw_handles [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2007.429704] env[68673]: DEBUG oslo_vmware.rw_handles [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2007.429901] env[68673]: DEBUG oslo_vmware.rw_handles [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2007.530210] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f585ec-5736-46b3-8459-d1e097ee2b49 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.537904] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44effacf-72f7-4ee8-884c-084c6ac774e9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.568029] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba514d4-d346-4173-8593-1ebc771e8cc8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.575067] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d125984-6697-410f-a428-6fadf529da7a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.587840] env[68673]: DEBUG nova.compute.provider_tree [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2007.599946] env[68673]: DEBUG nova.scheduler.client.report [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2007.615809] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.316s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.616360] env[68673]: ERROR nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2007.616360] env[68673]: Faults: ['InvalidArgument'] [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Traceback (most recent call last): [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2007.616360] env[68673]: ERROR nova.compute.manager 
[instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self.driver.spawn(context, instance, image_meta, [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self._fetch_image_if_missing(context, vi) [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] image_cache(vi, tmp_image_ds_loc) [ 2007.616360] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] vm_util.copy_virtual_disk( [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] session._wait_for_task(vmdk_copy_task) [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return self.wait_for_task(task_ref) [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return evt.wait() [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] result = hub.switch() [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] return self.greenlet.switch() [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2007.616770] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] self.f(*self.args, **self.kw) [ 2007.617250] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2007.617250] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] raise exceptions.translate_fault(task_info.error) [ 2007.617250] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2007.617250] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Faults: ['InvalidArgument'] [ 2007.617250] env[68673]: ERROR nova.compute.manager [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] [ 2007.617250] env[68673]: DEBUG nova.compute.utils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2007.618903] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Build of instance 601dfed1-fb7b-413a-836d-7fda61314c73 was re-scheduled: A specified parameter was not correct: fileType [ 2007.618903] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2007.619304] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2007.619480] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2007.619663] env[68673]: DEBUG nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2007.619827] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2007.926208] env[68673]: DEBUG nova.network.neutron [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.939123] env[68673]: INFO nova.compute.manager [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Took 0.32 seconds to deallocate network for instance. [ 2008.042605] env[68673]: INFO nova.scheduler.client.report [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Deleted allocations for instance 601dfed1-fb7b-413a-836d-7fda61314c73 [ 2008.063053] env[68673]: DEBUG oslo_concurrency.lockutils [None req-791c9291-e659-4769-826e-8772125478c7 tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 580.415s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.064411] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 383.776s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.064667] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Acquiring lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.064947] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.065137] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.066976] env[68673]: INFO nova.compute.manager [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Terminating instance [ 2008.068666] env[68673]: DEBUG nova.compute.manager [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2008.068856] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2008.069334] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bc1de9f-cc79-426c-bdf2-29950f3856f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.074827] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2008.081053] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2c717a-fdbe-4f50-bc6c-33b267d1c716 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.109684] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 601dfed1-fb7b-413a-836d-7fda61314c73 could not be found. [ 2008.109877] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2008.110062] env[68673]: INFO nova.compute.manager [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Took 0.04 seconds to destroy the instance on the hypervisor. 
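The Acquiring / acquired / "released" triplets that recur throughout this log (lockutils.py:402, 407 and 421) come from oslo.concurrency's named-lock wrapper, which also measures the waited and held durations. A minimal sketch of the pattern, assuming only that oslo.concurrency is installed (update_usage is a hypothetical function, not Nova's resource tracker):

    from oslo_concurrency import lockutils

    # Decorated functions run with the named in-process lock held, which
    # serializes concurrent updates and produces the Acquiring/acquired/
    # "released" debug lines, including the waited/held timings.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        pass  # body executes only while "compute_resources" is held

This is why the resource-tracker audit, claim, and abort-claim records never interleave: they all funnel through the same "compute_resources" lock name.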
[ 2008.110333] env[68673]: DEBUG oslo.service.loopingcall [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.112466] env[68673]: DEBUG nova.compute.manager [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2008.112564] env[68673]: DEBUG nova.network.neutron [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2008.125718] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.125962] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.127440] env[68673]: INFO nova.compute.claims [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2008.136988] env[68673]: DEBUG nova.network.neutron [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.146850] env[68673]: INFO nova.compute.manager [-] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] Took 0.03 seconds to deallocate network for instance. [ 2008.234083] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bb8cb513-2ba8-4f1a-abbb-df9efaa3c71d tempest-ServerRescueTestJSON-391359122 tempest-ServerRescueTestJSON-391359122-project-member] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.234958] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 179.113s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.235112] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 601dfed1-fb7b-413a-836d-7fda61314c73] During sync_power_state the instance has a pending task (deleting). Skip. 
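The "Waiting for function ..._deallocate_network_with_retries to return." record is emitted by oslo.service's loopingcall machinery, which suggests a RetryDecorator-style wrapper around the Neutron deallocation call. A sketch under that assumption; the retry count, sleep times, and exception type below are illustrative, not Nova's actual values:

    from oslo_service import loopingcall

    # Retries the wrapped function with an increasing sleep whenever one
    # of the listed exceptions is raised, logging the 'Waiting for
    # function ... to return.' line while doing so.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=30,
                                exceptions=(ConnectionError,))
    def _deallocate_network_with_retries():
        # The real body calls Neutron to release the instance's ports;
        # a transient ConnectionError here would trigger a retry.
        pass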
[ 2008.235286] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "601dfed1-fb7b-413a-836d-7fda61314c73" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.289335] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b6c7c0-aea3-4fe0-848b-6034057b2f11 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.296764] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d721b5ec-034f-4572-aeb4-89e8e2b697b5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.327597] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23ac14e-add9-408b-a871-2b1320eaa797 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.334323] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadc3959-ea5e-424c-9773-2b4ba1f8b09d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.346952] env[68673]: DEBUG nova.compute.provider_tree [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.356186] env[68673]: DEBUG nova.scheduler.client.report [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2008.370352] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.244s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.370876] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Start building networks asynchronously for instance. 
[ 2008.404203] env[68673]: DEBUG nova.compute.utils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2008.406294] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2008.406470] env[68673]: DEBUG nova.network.neutron [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2008.414608] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2008.460779] env[68673]: DEBUG nova.policy [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f617cfb6919840ad99e1320228344b18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c73dd528430445bb8717487ffd7dd780', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2008.478748] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2008.499562] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2008.499813] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2008.500035] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2008.500195] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2008.500362] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2008.500512] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2008.500723] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2008.500882] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2008.501064] env[68673]: DEBUG 
nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2008.501232] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2008.501407] env[68673]: DEBUG nova.virt.hardware [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2008.502291] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43afd225-2e17-428b-8a08-03758ea38a57 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.512098] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900034e5-13ff-46e0-a083-52750229264d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.877094] env[68673]: DEBUG nova.network.neutron [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Successfully created port: 507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2009.438599] env[68673]: DEBUG nova.network.neutron [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Successfully updated port: 507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2009.451636] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.451796] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.451947] env[68673]: DEBUG nova.network.neutron [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2009.679680] env[68673]: DEBUG nova.network.neutron [None 
req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2009.918291] env[68673]: DEBUG nova.network.neutron [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Updating instance_info_cache with network_info: [{"id": "507be42c-a621-479c-8114-bafcbdac34a8", "address": "fa:16:3e:99:3b:e5", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507be42c-a6", "ovs_interfaceid": "507be42c-a621-479c-8114-bafcbdac34a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.932145] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.932436] env[68673]: DEBUG nova.compute.manager [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Instance network_info: |[{"id": "507be42c-a621-479c-8114-bafcbdac34a8", "address": "fa:16:3e:99:3b:e5", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507be42c-a6", "ovs_interfaceid": 
"507be42c-a621-479c-8114-bafcbdac34a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2009.932854] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:3b:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '507be42c-a621-479c-8114-bafcbdac34a8', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2009.941123] env[68673]: DEBUG oslo.service.loopingcall [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2009.941635] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2009.941859] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90f1fa14-f87b-4c6e-af7d-a4bf247f0720 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.964838] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2009.964838] env[68673]: value = "task-3433615" [ 2009.964838] env[68673]: _type = "Task" [ 2009.964838] env[68673]: } to complete. 
[ 2009.967388] env[68673]: DEBUG nova.compute.manager [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Received event network-vif-plugged-507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2009.967536] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Acquiring lock "07f2fc85-14df-4702-bf49-67d8ce8e9526-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.967741] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Lock "07f2fc85-14df-4702-bf49-67d8ce8e9526-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.967911] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Lock "07f2fc85-14df-4702-bf49-67d8ce8e9526-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.968112] env[68673]: DEBUG nova.compute.manager [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] No waiting events found dispatching network-vif-plugged-507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2009.968285] env[68673]: WARNING nova.compute.manager [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Received unexpected event network-vif-plugged-507be42c-a621-479c-8114-bafcbdac34a8 for instance with vm_state building and task_state spawning. [ 2009.968441] env[68673]: DEBUG nova.compute.manager [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Received event network-changed-507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2009.968589] env[68673]: DEBUG nova.compute.manager [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Refreshing instance network info cache due to event network-changed-507be42c-a621-479c-8114-bafcbdac34a8. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2009.968764] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Acquiring lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.968894] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Acquired lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.969092] env[68673]: DEBUG nova.network.neutron [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Refreshing network info cache for port 507be42c-a621-479c-8114-bafcbdac34a8 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2009.980189] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433615, 'name': CreateVM_Task} progress is 6%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.309073] env[68673]: DEBUG nova.network.neutron [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Updated VIF entry in instance network info cache for port 507be42c-a621-479c-8114-bafcbdac34a8. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2010.309443] env[68673]: DEBUG nova.network.neutron [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Updating instance_info_cache with network_info: [{"id": "507be42c-a621-479c-8114-bafcbdac34a8", "address": "fa:16:3e:99:3b:e5", "network": {"id": "373a1e91-3242-4f5c-a622-1310602c2fac", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1302582399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73dd528430445bb8717487ffd7dd780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap507be42c-a6", "ovs_interfaceid": "507be42c-a621-479c-8114-bafcbdac34a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.318644] env[68673]: DEBUG oslo_concurrency.lockutils [req-2c27616f-a2d0-4a01-bee2-59108a76169e req-e8d27f91-29e4-41fc-aec4-84baf9a1f6ed service nova] Releasing lock "refresh_cache-07f2fc85-14df-4702-bf49-67d8ce8e9526" {{(pid=68673) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.476470] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433615, 'name': CreateVM_Task} progress is 25%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.977229] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433615, 'name': CreateVM_Task, 'duration_secs': 0.625245} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.977701] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2010.978150] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.978322] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.978760] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2010.978876] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39addd80-c944-49f3-a648-297943b57570 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.983126] env[68673]: DEBUG oslo_vmware.api [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 2010.983126] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52702da4-b6a7-bc52-8e61-f363abf98da7" [ 2010.983126] env[68673]: _type = "Task" [ 2010.983126] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.990395] env[68673]: DEBUG oslo_vmware.api [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52702da4-b6a7-bc52-8e61-f363abf98da7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.494061] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.494061] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2011.494540] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.113242] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "0dd8e45a-d77a-4c9b-a733-353fce754549" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.784133] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.783945] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.783945] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
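The "Running periodic task ComputeManager._..." lines are oslo.service's periodic-task machinery: methods decorated with @periodic_task.periodic_task are collected from the manager class and dispatched on their spacing by run_periodic_tasks. A minimal sketch, assuming only the public oslo_service and oslo_config APIs (the manager class and the 60s spacing are illustrative, not Nova's values):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            # Nova's version returns early when
            # CONF.reclaim_instance_interval <= 0, which is exactly the
            # "skipping..." message logged above.
            pass

    # One dispatch pass; in a service this is driven by a timer loop that
    # sleeps for the idle time run_periodic_tasks returns.
    Manager().run_periodic_tasks(context=None)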
[ 2036.779109] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.779414] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.801879] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.801879] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2036.801879] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2036.821024] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821188] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821321] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821446] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821568] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821688] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821805] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.821923] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.822314] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.822314] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2036.822314] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2038.784259] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.784664] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.784966] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.796630] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.796843] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.797022] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.797184] 
env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2039.798269] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b772644a-4fba-45e1-973d-eb0b031db4a5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.806552] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44a0097-1f1a-45f5-883b-aa132df0ec4b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.820194] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa30a7a-8efb-436d-8628-abd623f757a5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.826129] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b709a859-873b-4928-a590-e3cde8809b2f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.854506] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180899MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2039.854506] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.854671] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.923597] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.923766] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.923896] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924036] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924167] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924284] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924400] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924595] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924666] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924729] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2039.924907] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2039.925058] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2040.048360] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43dda9a3-93b3-4d81-9709-71e0453defee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.055866] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f754ce-ccaf-463f-bf3c-2f56c1bf0e9e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.084818] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b31c00b-3050-4a9b-9cc6-108de93bb031 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.091320] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b243b3-322f-4b89-913c-bad60b588237 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.103885] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.111957] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2040.124926] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2040.125125] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.271s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.124545] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2044.784183] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.674693] env[68673]: WARNING oslo_vmware.rw_handles [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2056.674693] env[68673]: ERROR oslo_vmware.rw_handles [ 2056.675519] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2056.677096] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2056.677333] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Copying Virtual Disk [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/322e0e8a-936d-4638-a1fd-8b69870acaef/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2056.677621] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-aa7340a7-d08f-4616-8b15-22dd13fa6b76 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.685253] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2056.685253] env[68673]: value = "task-3433616" [ 2056.685253] env[68673]: _type = "Task" [ 2056.685253] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.692956] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433616, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.195593] env[68673]: DEBUG oslo_vmware.exceptions [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2057.195846] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.196396] env[68673]: ERROR nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2057.196396] env[68673]: Faults: ['InvalidArgument'] [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Traceback (most recent call last): [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] yield resources [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self.driver.spawn(context, instance, image_meta, [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] 
self._fetch_image_if_missing(context, vi) [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2057.196396] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] image_cache(vi, tmp_image_ds_loc) [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] vm_util.copy_virtual_disk( [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] session._wait_for_task(vmdk_copy_task) [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return self.wait_for_task(task_ref) [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return evt.wait() [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] result = hub.switch() [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return self.greenlet.switch() [ 2057.196991] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self.f(*self.args, **self.kw) [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] raise exceptions.translate_fault(task_info.error) [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Faults: ['InvalidArgument'] [ 2057.197458] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] [ 2057.197458] env[68673]: INFO nova.compute.manager [None 
req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Terminating instance [ 2057.198322] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.198531] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2057.198766] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0983e3b4-028a-4ab1-a874-7529d3aa7735 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.200922] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2057.201191] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2057.201908] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50406101-c31e-4084-8a2c-a90801a49626 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.209665] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2057.209873] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c589ebee-ac7e-45d7-9024-61c2d9c27361 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.211951] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2057.212132] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2057.213076] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-275c07fc-caee-4373-90a6-56c336db4513 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.217624] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for the task: (returnval){ [ 2057.217624] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52612b88-4418-cee8-dd0a-0c498c93d35a" [ 2057.217624] env[68673]: _type = "Task" [ 2057.217624] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.226017] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52612b88-4418-cee8-dd0a-0c498c93d35a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.276025] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2057.276274] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2057.276436] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleting the datastore file [datastore1] c6f7698c-3a1d-47e7-aeac-fd0e50376a39 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2057.276689] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f0a361d-a2ee-4115-8198-8af3ae37a0bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.282881] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2057.282881] env[68673]: value = "task-3433618" [ 2057.282881] env[68673]: _type = "Task" [ 2057.282881] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.290170] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433618, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.728413] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2057.728700] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Creating directory with path [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2057.728910] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85322ee2-82fc-4dc2-b3f5-8b355d423466 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.741019] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Created directory with path [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2057.741242] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Fetch image to [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2057.741418] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2057.742150] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27484998-3843-4c3d-97be-efa1e538210e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.748548] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378a987e-f321-4898-ac7c-506cf84f6ec8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.757398] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72681017-25f4-495c-a337-5c822ff5366d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.789927] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b0e7632-30eb-4439-990d-41863ec81b21 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.796856] env[68673]: DEBUG oslo_vmware.api [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074475} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.798232] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2057.798420] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2057.798596] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2057.798767] env[68673]: INFO nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Took 0.60 seconds to destroy the instance on the hypervisor. 
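The teardown above is oslo.vmware's standard invoke-and-wait idiom: FileManager.DeleteDatastoreFile_Task is started on the vCenter side and task-3433618 is polled until it reports completion. A minimal sketch of that idiom, not Nova's actual ds_util.file_delete implementation; it assumes an already-established oslo_vmware.api.VMwareAPISession and a datacenter managed-object reference resolved elsewhere:

# Sketch only: `session` is an oslo_vmware.api.VMwareAPISession and
# `dc_ref` a datacenter managed-object reference obtained beforehand.
def delete_datastore_file(session, dc_ref, path):
    # FileManager hangs off the vSphere service content.
    file_manager = session.vim.service_content.fileManager
    # Start the server-side task, e.g. with
    # path = '[datastore1] c6f7698c-3a1d-47e7-aeac-fd0e50376a39'
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=path, datacenter=dc_ref)
    # wait_for_task() polls the task info on a timer and raises a
    # translated fault on error; its progress ticks are the
    # "DeleteDatastoreFile_Task progress is 0%" entries above.
    return session.wait_for_task(task)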
[ 2057.800504] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a1dbbe56-8ee6-483e-88fd-9d7507920048 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.802324] env[68673]: DEBUG nova.compute.claims [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2057.802512] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.802725] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.824274] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2057.960382] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2058.019424] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8756aaf-bdce-44ad-a030-be6789a333df {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.024110] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2058.024304] env[68673]: DEBUG oslo_vmware.rw_handles [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2058.029396] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c670962c-43ac-437b-8f26-b9fb83232542 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.058660] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d8c9f4-8d96-486f-822b-b4399614015a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.065520] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfc5016-34eb-48c3-8fc9-88ae3640b7af {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.078431] env[68673]: DEBUG nova.compute.provider_tree [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2058.086818] env[68673]: DEBUG nova.scheduler.client.report [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2058.099466] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.297s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.099977] env[68673]: ERROR nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2058.099977] env[68673]: Faults: ['InvalidArgument'] [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Traceback (most recent call last): [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self.driver.spawn(context, instance, image_meta, [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2058.099977] env[68673]: ERROR 
nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self._fetch_image_if_missing(context, vi) [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] image_cache(vi, tmp_image_ds_loc) [ 2058.099977] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] vm_util.copy_virtual_disk( [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] session._wait_for_task(vmdk_copy_task) [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return self.wait_for_task(task_ref) [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return evt.wait() [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] result = hub.switch() [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] return self.greenlet.switch() [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2058.100418] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] self.f(*self.args, **self.kw) [ 2058.100833] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2058.100833] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] raise exceptions.translate_fault(task_info.error) [ 2058.100833] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2058.100833] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Faults: ['InvalidArgument'] [ 2058.100833] env[68673]: ERROR nova.compute.manager [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] [ 2058.100833] env[68673]: DEBUG nova.compute.utils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2058.102144] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Build of instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 was re-scheduled: A specified parameter was not correct: fileType [ 2058.102144] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2058.102532] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2058.102704] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2058.102875] env[68673]: DEBUG nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2058.103051] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2058.543886] env[68673]: DEBUG nova.network.neutron [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.556103] env[68673]: INFO nova.compute.manager [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Took 0.45 seconds to deallocate network for instance. 
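The root-cause fault recurs here exactly as in the first traceback: CopyVirtualDisk_Task fails server-side, the poller reads the terminal error state, and translate_fault() turns it into the VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) that aborts the build and triggers the re-schedule. A simplified, self-contained sketch of that polling contract, with illustrative stand-ins rather than the oslo_vmware source:

import time

class TaskFault(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list

def poll_task(read_task_info, interval=0.5):
    # read_task_info() stands in for the PropertyCollector round trip
    # that fetches TaskInfo; it returns a dict with 'state' plus error
    # details once the task has failed.
    while True:
        info = read_task_info()
        if info['state'] in ('queued', 'running'):
            time.sleep(interval)  # the looping call simply re-polls
            continue
        if info['state'] == 'success':
            return info
        # Terminal 'error' state: surface the server-side fault; this is
        # the raise visible at oslo_vmware/api.py line 448 in the traceback.
        raise TaskFault(info['error_msg'], info['faults'])

# Reproduces the sequence seen above: one running poll, then the fault.
states = iter([
    {'state': 'running'},
    {'state': 'error',
     'error_msg': 'A specified parameter was not correct: fileType',
     'faults': ['InvalidArgument']},
])
try:
    poll_task(lambda: next(states), interval=0)
except TaskFault as fault:
    print(fault, fault.fault_list)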
[ 2058.638792] env[68673]: INFO nova.scheduler.client.report [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted allocations for instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 [ 2058.658027] env[68673]: DEBUG oslo_concurrency.lockutils [None req-55e30102-2597-45db-9e4e-ede91e4d2f05 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 634.804s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.658282] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 439.330s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.658490] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.658690] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.658851] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.660696] env[68673]: INFO nova.compute.manager [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Terminating instance [ 2058.662392] env[68673]: DEBUG nova.compute.manager [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Start destroying the instance on the hypervisor.
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2058.662580] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2058.663044] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c5d39c3-2910-4d47-80e6-b3e5470eccd9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.671633] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1fc355-0d62-42d0-8896-f3bf57bec063 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.700475] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6f7698c-3a1d-47e7-aeac-fd0e50376a39 could not be found. [ 2058.700662] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2058.700836] env[68673]: INFO nova.compute.manager [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2058.701087] env[68673]: DEBUG oslo.service.loopingcall [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2058.701303] env[68673]: DEBUG nova.compute.manager [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2058.701395] env[68673]: DEBUG nova.network.neutron [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2058.725788] env[68673]: DEBUG nova.network.neutron [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.734435] env[68673]: INFO nova.compute.manager [-] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] Took 0.03 seconds to deallocate network for instance.
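This second terminate succeeds even though the backend VM is already gone: InstanceNotFound is caught, logged as the WARNING above, and the flow drops straight through to network deallocation. A rough sketch of that tolerance pattern; names are illustrative, not Nova's actual interfaces:

class InstanceNotFound(Exception):
    # Stand-in for nova.exception.InstanceNotFound.
    pass

def destroy_and_cleanup(uuid, hypervisor, network):
    try:
        hypervisor.destroy(uuid)
    except InstanceNotFound:
        # The VM was already unregistered by the earlier failed-build
        # cleanup; treat it as destroyed rather than failing the delete.
        print(f'Instance {uuid} does not exist on backend; continuing')
    # Deallocation runs unconditionally so Neutron ports cannot leak
    # when the hypervisor-side object vanished first.
    network.deallocate_for_instance(uuid)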
[ 2058.824528] env[68673]: DEBUG oslo_concurrency.lockutils [None req-2e957f5c-4c54-45e8-8641-ac74889687f8 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.166s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.825412] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 229.704s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.825646] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: c6f7698c-3a1d-47e7-aeac-fd0e50376a39] During sync_power_state the instance has a pending task (deleting). Skip. [ 2058.825859] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "c6f7698c-3a1d-47e7-aeac-fd0e50376a39" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.730222] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.730575] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.742571] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Starting instance...
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2072.797498] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.797746] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.799589] env[68673]: INFO nova.compute.claims [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.947350] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29de4e79-ea6d-4f4e-b128-07490796c4f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.954702] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f6636d-526e-4140-9de0-d216ae845190 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.984796] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b12713d-aadf-42db-958f-50de2a891cfa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.991693] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891f5305-afd0-46a0-8e06-7fc41d587d25 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.004489] env[68673]: DEBUG nova.compute.provider_tree [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2073.014076] env[68673]: DEBUG nova.scheduler.client.report [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2073.026881] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.229s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.027345] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2073.059187] env[68673]: DEBUG nova.compute.utils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2073.060545] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2073.060765] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2073.070161] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2073.123865] env[68673]: DEBUG nova.policy [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '920b26b61d494f269c05579970187435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70890a0ed0804a319a83a84711371cee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2073.133012] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2073.155831] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2073.156088] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2073.156249] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2073.156429] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2073.156575] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2073.156720] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2073.156922] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2073.157092] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2073.157258] 
env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2073.157417] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2073.157584] env[68673]: DEBUG nova.virt.hardware [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2073.158489] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b78ce-bd13-4e7a-8a9a-1be3d1d7c5f1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.167757] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38b4310-2c51-44dd-86a1-8f6fd434c093 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.430917] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Successfully created port: ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2074.068691] env[68673]: DEBUG nova.compute.manager [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Received event network-vif-plugged-ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2074.068948] env[68673]: DEBUG oslo_concurrency.lockutils [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] Acquiring lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.069142] env[68673]: DEBUG oslo_concurrency.lockutils [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] Lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.069314] env[68673]: DEBUG oslo_concurrency.lockutils [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] Lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [
2074.069480] env[68673]: DEBUG nova.compute.manager [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] No waiting events found dispatching network-vif-plugged-ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2074.069640] env[68673]: WARNING nova.compute.manager [req-575df5a5-77e5-4838-a6e2-c582d85d6b9a req-3b8a1a92-8013-4899-8ed8-cae6c2fc94fe service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Received unexpected event network-vif-plugged-ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 for instance with vm_state building and task_state spawning. [ 2074.149475] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Successfully updated port: ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2074.157226] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.157359] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.157501] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2074.197840] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2074.358865] env[68673]: DEBUG nova.network.neutron [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Updating instance_info_cache with network_info: [{"id": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "address": "fa:16:3e:f7:12:e0", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea15cdaa-a8", "ovs_interfaceid": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.370849] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Releasing lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2074.371142] env[68673]: DEBUG nova.compute.manager [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Instance network_info: |[{"id": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "address": "fa:16:3e:f7:12:e0", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea15cdaa-a8", "ovs_interfaceid": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2074.371576] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:12:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2074.379150] env[68673]: DEBUG oslo.service.loopingcall [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2074.379576] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2074.379815] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5e8b2b1-6211-453f-991e-85eaee43fe22 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.401415] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2074.401415] env[68673]: value = "task-3433619" [ 2074.401415] env[68673]: _type = "Task" [ 2074.401415] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.162249] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433619, 'name': CreateVM_Task, 'duration_secs': 0.284158} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.162613] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2075.163020] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.163203] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.163508] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2075.163756] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6803d302-7c91-495b-b935-6a3cb7776dee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.167846] env[68673]: DEBUG oslo_vmware.api [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for the task: (returnval){ [ 2075.167846] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b3d2d0-a99b-19bb-b338-882cb8396967" [ 2075.167846] env[68673]: _type = "Task" [ 2075.167846] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.175177] env[68673]: DEBUG oslo_vmware.api [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b3d2d0-a99b-19bb-b338-882cb8396967, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.678432] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.678705] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2075.678904] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8e300125-2959-421d-a293-7c91223761d0 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.096082] env[68673]: DEBUG nova.compute.manager [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Received event network-changed-ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2076.096273] env[68673]: DEBUG nova.compute.manager [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Refreshing instance network info cache due to event network-changed-ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2076.096481] env[68673]: DEBUG oslo_concurrency.lockutils [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] Acquiring lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.096622] env[68673]: DEBUG oslo_concurrency.lockutils [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] Acquired lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.096779] env[68673]: DEBUG nova.network.neutron [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Refreshing network info cache for port ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2076.334540] env[68673]: DEBUG nova.network.neutron [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Updated VIF entry in instance network info cache for port ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2076.334902] env[68673]: DEBUG nova.network.neutron [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Updating instance_info_cache with network_info: [{"id": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "address": "fa:16:3e:f7:12:e0", "network": {"id": "4438a4bf-651c-4e90-9701-c12346e7119c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1742834654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70890a0ed0804a319a83a84711371cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea15cdaa-a8", "ovs_interfaceid": "ea15cdaa-a88a-433f-bc02-ea65aa0eb0f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.344158] env[68673]: DEBUG oslo_concurrency.lockutils [req-83785234-0c75-4af9-8944-e923508af3dc req-e414a44e-0211-4596-b65a-306d87404889 service nova] Releasing lock "refresh_cache-85ee6b4a-3c96-4be5-81d2-8b3ca661924e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.784594] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.784450] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.784709] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2097.779520] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.784017] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.784329] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2098.784411] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2098.806921] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807078] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807207] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807332] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807450] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807568] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807685] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807801] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.807918] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.808046] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2098.808165] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2098.808637] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.783737] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2101.784059] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2101.795536] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2101.795752] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.795914] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.796096] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
2101.797189] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed92935-cece-443b-8705-942fbfa6deaf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.805994] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06cd939-5b1b-4a50-b822-e7d67e09ef65 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.819126] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2476c5e7-8ee3-4189-89d0-4fa81a8d73e8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.824997] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0904f9a-e6c4-4d12-b536-8b75740e7b17 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.853672] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180903MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2101.853791] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2101.853981] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.944091] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944269] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944395] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944523] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944641] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944756] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944868] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.944982] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.945114] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.945226] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2101.945410] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2101.945546] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2101.960411] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2101.973774] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2101.973952] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2101.984183] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2102.000799] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2102.107556] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd93a19-1eee-4054-b86e-523324eca072 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.114872] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1a6d079e-3cbc-418d-be6d-51356a0df58b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.144097] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987b297e-03a2-49ed-8484-507a3b35b93c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.150711] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4a5d98-2386-428d-bb7c-ba71d78ca153 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.164407] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2102.173802] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2102.186819] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2102.186985] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.333s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.187324] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.168452] env[68673]: WARNING oslo_vmware.rw_handles [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2104.168452] env[68673]: ERROR oslo_vmware.rw_handles [ 2104.170820] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2104.171358] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2104.171608] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Copying Virtual Disk [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/413f1cf4-06e1-4e68-b20e-2a7a7a29d8db/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2104.171890] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8f2a6e3-6fd8-4b4a-807c-95802ba315bf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.178990] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for the task: (returnval){ [ 2104.178990] env[68673]: value = "task-3433620" [ 2104.178990] env[68673]: _type = "Task" [ 2104.178990] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.186758] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Task: {'id': task-3433620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.689478] env[68673]: DEBUG oslo_vmware.exceptions [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Fault InvalidArgument not matched. 
{{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2104.689909] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.690323] env[68673]: ERROR nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2104.690323] env[68673]: Faults: ['InvalidArgument'] [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Traceback (most recent call last): [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] yield resources [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self.driver.spawn(context, instance, image_meta, [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self._fetch_image_if_missing(context, vi) [ 2104.690323] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] image_cache(vi, tmp_image_ds_loc) [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] vm_util.copy_virtual_disk( [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] session._wait_for_task(vmdk_copy_task) [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return self.wait_for_task(task_ref) [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return evt.wait() [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] result = hub.switch() [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2104.690923] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return self.greenlet.switch() [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self.f(*self.args, **self.kw) [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] raise exceptions.translate_fault(task_info.error) [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Faults: ['InvalidArgument'] [ 2104.691363] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] [ 2104.691363] env[68673]: INFO nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Terminating instance [ 2104.692202] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.692412] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2104.692650] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3b267e9-81b6-4ca8-9d3c-340b7fc2be81 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.694764] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2104.694958] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2104.695670] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243a490c-025b-43ff-8001-93a1888b2a66 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.702128] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2104.702338] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b6be18f-a989-4f88-bffe-c67bfe5dc68e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.704381] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2104.704558] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2104.705481] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02f4e69e-5d93-4d22-992c-55b77f4d758f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.709746] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for the task: (returnval){ [ 2104.709746] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52df1412-72a6-c985-85fa-7c9ce7f842dc" [ 2104.709746] env[68673]: _type = "Task" [ 2104.709746] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.719120] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52df1412-72a6-c985-85fa-7c9ce7f842dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.773362] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2104.773581] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2104.773770] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Deleting the datastore file [datastore1] 2a8badf2-c080-46dc-be89-4c73bb88cc01 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2104.774052] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be619efd-b300-4edc-b33f-ca49f3547952 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.780485] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for the task: (returnval){ [ 2104.780485] env[68673]: value = "task-3433622" [ 2104.780485] env[68673]: _type = "Task" [ 2104.780485] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.788069] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Task: {'id': task-3433622, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.220449] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2105.220811] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Creating directory with path [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2105.221128] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2625036d-8e36-4531-ba3e-8d2ae1eba19b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.233250] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Created directory with path [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2105.233448] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Fetch image to [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2105.233617] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2105.234354] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0aacfd8-92c0-4313-a74e-b87fdb74a37a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.240825] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb5b994-2f99-4658-9f49-60f265999a19 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.249621] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040442e7-555a-4e77-b937-e64c7ef2b1e6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.280985] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ba865-659e-431c-baa8-466b1348d338 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.291533] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8739e554-8826-4396-b138-93bd3bd460ab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.293194] env[68673]: DEBUG oslo_vmware.api [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Task: {'id': task-3433622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062246} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.293433] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2105.293622] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2105.293772] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2105.293936] env[68673]: INFO nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2105.296060] env[68673]: DEBUG nova.compute.claims [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2105.296254] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.296468] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.313676] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2105.460802] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb4c74d-68e3-4ac8-80a1-9947d0463ffc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.465411] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2105.469975] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50657ff-5730-4168-88ca-4be9b01ec547 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.553413] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38290679-9037-4ded-a8c7-e706ae706ee9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.556282] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Completed reading data from the image iterator. 
{{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2105.556483] env[68673]: DEBUG oslo_vmware.rw_handles [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2105.561339] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fe5bf1-44af-496c-8a7f-ed986089d25a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.575563] env[68673]: DEBUG nova.compute.provider_tree [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2105.584380] env[68673]: DEBUG nova.scheduler.client.report [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2105.598380] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.302s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.598910] env[68673]: ERROR nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2105.598910] env[68673]: Faults: ['InvalidArgument'] [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Traceback (most recent call last): [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self.driver.spawn(context, instance, image_meta, [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self._fetch_image_if_missing(context, vi) [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] image_cache(vi, tmp_image_ds_loc) [ 2105.598910] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] vm_util.copy_virtual_disk( [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] session._wait_for_task(vmdk_copy_task) [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return self.wait_for_task(task_ref) [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return evt.wait() [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] result = hub.switch() [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] return self.greenlet.switch() [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2105.599311] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] self.f(*self.args, **self.kw) [ 2105.599689] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2105.599689] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] raise exceptions.translate_fault(task_info.error) [ 2105.599689] env[68673]: ERROR nova.compute.manager [instance: 
2a8badf2-c080-46dc-be89-4c73bb88cc01] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2105.599689] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Faults: ['InvalidArgument'] [ 2105.599689] env[68673]: ERROR nova.compute.manager [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] [ 2105.599689] env[68673]: DEBUG nova.compute.utils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2105.601011] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Build of instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 was re-scheduled: A specified parameter was not correct: fileType [ 2105.601011] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2105.601395] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2105.601567] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2105.601738] env[68673]: DEBUG nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2105.601896] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2105.783874] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.925030] env[68673]: DEBUG nova.network.neutron [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.940201] env[68673]: INFO nova.compute.manager [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Took 0.34 seconds to deallocate network for instance. 
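The traceback above bottoms out in oslo_vmware/api.py's _poll_task re-raising the failed CopyVirtualDisk_Task's error as a VimFaultException via exceptions.translate_fault(task_info.error). Below is a minimal, self-contained sketch of that poll-and-raise flow; TaskInfo, poll_task, and fetch_task_info are hypothetical stand-ins for illustration, not oslo.vmware's actual classes.

    import time
    from dataclasses import dataclass, field

    @dataclass
    class TaskInfo:
        # mirrors the fields the traceback shows being consulted
        state: str                      # 'queued' | 'running' | 'success' | 'error'
        error: str | None = None
        fault_list: list[str] = field(default_factory=list)

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def poll_task(fetch_task_info, interval=0.5):
        """Poll until the server-side task finishes; raise on 'error'.

        fetch_task_info is a callable returning a TaskInfo snapshot,
        standing in for a PropertyCollector read of the Task object.
        """
        while True:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # translate_fault() picks an exception class from the fault
                # name; a fault like 'InvalidArgument' with no dedicated
                # class surfaces as the generic VimFaultException.
                raise VimFaultException(info.fault_list, info.error)
            time.sleep(interval)

    # The copy task above ends in state 'error', so the poll re-raises it
    # as the exception seen in the traceback:
    states = iter([TaskInfo('running'),
                   TaskInfo('error',
                            error='A specified parameter was not correct: fileType',
                            fault_list=['InvalidArgument'])])
    try:
        poll_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc.fault_list, exc)   # ['InvalidArgument'] A specified parameter ...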
[ 2106.041166] env[68673]: INFO nova.scheduler.client.report [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Deleted allocations for instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 [ 2106.068700] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9e1e8cda-fd5e-4f48-8d98-6a636a2d0ca4 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 674.134s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.068976] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 477.840s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.069664] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Acquiring lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.069664] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.069664] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.071693] env[68673]: INFO nova.compute.manager [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Terminating instance [ 2106.073733] env[68673]: DEBUG nova.compute.manager [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2106.076219] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2106.076219] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91c65a52-ba8c-4dd1-a071-9d8375b101d4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.086650] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59a87a8-1168-423b-a621-67d05f1d8047 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.115144] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a8badf2-c080-46dc-be89-4c73bb88cc01 could not be found. [ 2106.115356] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2106.115537] env[68673]: INFO nova.compute.manager [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2106.115782] env[68673]: DEBUG oslo.service.loopingcall [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2106.116017] env[68673]: DEBUG nova.compute.manager [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2106.116121] env[68673]: DEBUG nova.network.neutron [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2106.142516] env[68673]: DEBUG nova.network.neutron [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.151442] env[68673]: INFO nova.compute.manager [-] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] Took 0.03 seconds to deallocate network for instance. 
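The destroy path above logs InstanceNotFound only as a warning and still reports "Instance destroyed": a VM that is already gone from the backend (here, one that never finished spawning) is treated as successfully destroyed so cleanup can proceed. A hedged sketch of that tolerate-missing pattern, where lookup_vm and destroy_vm are hypothetical callables standing in for the SearchIndex lookup and the unregister-and-delete step:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, lookup_vm, destroy_vm, log):
        """Destroy the backend VM; a VM that is already gone counts as done."""
        try:
            vm_ref = lookup_vm(instance_uuid)
            destroy_vm(vm_ref)
        except InstanceNotFound as exc:
            # Deleting after a failed spawn (or deleting twice) is expected
            # during cleanup, so warn and fall through to "destroyed".
            log(f"Instance does not exist on backend: {exc}")
        log("Instance destroyed")

    def missing(uuid):
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    destroy("2a8badf2-c080-46dc-be89-4c73bb88cc01", missing, lambda ref: None, print)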
[ 2106.242857] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8a25d567-0b5c-4bfe-91be-62102c783116 tempest-ImagesOneServerTestJSON-707332163 tempest-ImagesOneServerTestJSON-707332163-project-member] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.243922] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 277.122s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.244194] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2a8badf2-c080-46dc-be89-4c73bb88cc01] During sync_power_state the instance has a pending task (deleting). Skip. [ 2106.244354] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "2a8badf2-c080-46dc-be89-4c73bb88cc01" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.784583] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.792128] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.792550] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2119.792800] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.793189] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2119.803723] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2133.370830] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2154.186613] env[68673]: WARNING oslo_vmware.rw_handles [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2154.186613] env[68673]: ERROR oslo_vmware.rw_handles [ 2154.187382] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2154.188988] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2154.189254] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Copying Virtual Disk [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/e6c753a6-d2d3-4a45-a557-69c3ffe5d9de/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2154.189540] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-971a1c0c-034c-4562-bff3-6a12bfe6c7cf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.197316] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for the task: (returnval){ [ 2154.197316] env[68673]: value = "task-3433623" [ 2154.197316] env[68673]: 
_type = "Task" [ 2154.197316] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.205059] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Task: {'id': task-3433623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2154.707882] env[68673]: DEBUG oslo_vmware.exceptions [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2154.708294] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2154.708837] env[68673]: ERROR nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2154.708837] env[68673]: Faults: ['InvalidArgument'] [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Traceback (most recent call last): [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] yield resources [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self.driver.spawn(context, instance, image_meta, [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self._fetch_image_if_missing(context, vi) [ 2154.708837] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] image_cache(vi, tmp_image_ds_loc) [ 2154.709300] env[68673]: ERROR 
nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] vm_util.copy_virtual_disk( [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] session._wait_for_task(vmdk_copy_task) [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return self.wait_for_task(task_ref) [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return evt.wait() [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] result = hub.switch() [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2154.709300] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return self.greenlet.switch() [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self.f(*self.args, **self.kw) [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] raise exceptions.translate_fault(task_info.error) [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Faults: ['InvalidArgument'] [ 2154.709773] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] [ 2154.709773] env[68673]: INFO nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Terminating instance [ 2154.711473] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.711473] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2154.711473] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80d13784-e450-4d76-8c74-f199b33a8704 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.713494] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2154.713700] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2154.714426] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25df6f5-119e-4345-b71a-fd4fd696e0e9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.721324] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2154.721585] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcaa89a4-f6b4-4247-a137-871598e3f2a9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.723564] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2154.723739] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2154.724646] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07d217d-0dd6-404b-bb7c-2bae40d3a2dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.729080] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2154.729080] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520416d5-105d-4f27-aadc-b4df812cf4c1" [ 2154.729080] env[68673]: _type = "Task" [ 2154.729080] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.741737] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]520416d5-105d-4f27-aadc-b4df812cf4c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2154.788156] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2154.788388] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2154.788569] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Deleting the datastore file [datastore1] 3352e87c-38dd-4bfa-937c-644abc30cf76 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2154.788827] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-149d2923-343c-4c05-832c-371b03a1f68b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.794891] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for the task: (returnval){ [ 2154.794891] env[68673]: value = "task-3433625" [ 2154.794891] env[68673]: _type = "Task" [ 2154.794891] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.802942] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Task: {'id': task-3433625, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2155.239725] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2155.240055] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2155.240332] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bca0bee9-52cf-4bbd-8d78-033aed1eb616 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.251903] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2155.252135] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Fetch image to [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2155.252318] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2155.253015] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2a7b7c-1146-4502-99f8-88c3589b0a0d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.259241] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1896a3ef-0cc1-4885-91a1-dbd0cd4eb070 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.268041] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ccfd5c-9972-4106-8b69-d81a684c41fa {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.299905] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ff62df-e062-4e6c-83e1-3a3bf3a80ffa {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.306674] env[68673]: DEBUG oslo_vmware.api [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Task: {'id': task-3433625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073449} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2155.307976] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2155.308177] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2155.308349] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2155.308518] env[68673]: INFO nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Took 0.59 seconds to destroy the instance on the hypervisor. 
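Note the handover visible above: one request releases the lock on the cached VMDK path and another immediately acquires it before re-preparing the fetch location. The image cache is serialized by a named lock keyed on the datastore path, so only one greenthread populates a given cache entry at a time. A standard-library sketch of that named-lock lifecycle (a stand-in for oslo_concurrency.lockutils.lock, not its implementation):

    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one lock per name

    @contextmanager
    def named_lock(name):
        """Serialize work on a shared resource identified by a string key,
        mirroring the 'Acquiring lock / acquired / released' lines above."""
        lock = _locks[name]
        print(f'Acquiring lock "{name}"')
        with lock:
            print(f'Lock "{name}" acquired')
            yield
        print(f'Lock "{name}" released')

    # Two requests racing to populate the same cached VMDK take turns:
    cache_key = ("[datastore1] devstack-image-cache_base/"
                 "7da4e48b-416f-425b-b73b-3305c69c87ef/"
                 "7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk")
    with named_lock(cache_key):
        pass  # fetch or copy the image while holding the lock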
[ 2155.310269] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d7d0449b-1ab2-462e-8734-8853d3438e7b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.312068] env[68673]: DEBUG nova.compute.claims [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2155.312242] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.312461] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.333017] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2155.383993] env[68673]: DEBUG oslo_vmware.rw_handles [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2155.445800] env[68673]: DEBUG oslo_vmware.rw_handles [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2155.445931] env[68673]: DEBUG oslo_vmware.rw_handles [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2155.514835] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572ccc9c-e494-4cd8-80f8-6abb369f4c75 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.522149] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2171fafb-87f4-4b31-b839-cfeb6a018d23 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.552744] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d12c96a-65d0-47d5-a10d-93b36a46f57f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.559203] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bfe9a5-e69f-4e9a-8285-b3434f6dba17 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.571798] env[68673]: DEBUG nova.compute.provider_tree [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2155.580505] env[68673]: DEBUG nova.scheduler.client.report [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2155.594249] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.594763] env[68673]: ERROR nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2155.594763] env[68673]: Faults: ['InvalidArgument'] [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Traceback (most recent call last): [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2155.594763] env[68673]: ERROR 
nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self.driver.spawn(context, instance, image_meta, [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self._fetch_image_if_missing(context, vi) [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] image_cache(vi, tmp_image_ds_loc) [ 2155.594763] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] vm_util.copy_virtual_disk( [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] session._wait_for_task(vmdk_copy_task) [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return self.wait_for_task(task_ref) [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return evt.wait() [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] result = hub.switch() [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] return self.greenlet.switch() [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2155.595216] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] self.f(*self.args, **self.kw) [ 2155.595686] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2155.595686] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] raise exceptions.translate_fault(task_info.error) [ 2155.595686] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2155.595686] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Faults: ['InvalidArgument'] [ 2155.595686] env[68673]: ERROR nova.compute.manager [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] [ 2155.595686] env[68673]: DEBUG nova.compute.utils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2155.597444] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Build of instance 3352e87c-38dd-4bfa-937c-644abc30cf76 was re-scheduled: A specified parameter was not correct: fileType [ 2155.597444] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2155.597811] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2155.597986] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2155.598175] env[68673]: DEBUG nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2155.598339] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2155.795359] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.122707] env[68673]: DEBUG nova.network.neutron [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.138569] env[68673]: INFO nova.compute.manager [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Took 0.54 seconds to deallocate network for instance. 
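The scheduler report-client entry above prints the provider inventory verbatim. Under the Placement resource model those fields combine into schedulable capacity as (total - reserved) * allocation_ratio, which is why 48 physical VCPUs advertise as 192 schedulable ones; max_unit separately caps a single allocation. A short worked example using the logged data (the capacity helper is illustrative, not Placement's code):

    # Inventory exactly as logged for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Usable capacity per resource class: (total - reserved) * allocation_ratio.
        # A single flavor is still limited by max_unit (e.g. 16 VCPUs here).
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}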
[ 2156.267034] env[68673]: INFO nova.scheduler.client.report [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Deleted allocations for instance 3352e87c-38dd-4bfa-937c-644abc30cf76 [ 2156.294162] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b31e68f5-44e3-4027-b950-1a199847693a tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 659.335s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.294442] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 463.399s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.294661] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Acquiring lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.295016] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.295199] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.298026] env[68673]: INFO nova.compute.manager [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Terminating instance [ 2156.299940] env[68673]: DEBUG nova.compute.manager [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2156.300540] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2156.300540] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5dab1511-f251-4ef1-9f8f-151bbd219a4e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.310385] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be058a2-8125-4b0b-a638-5d44a5101b1d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.338638] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3352e87c-38dd-4bfa-937c-644abc30cf76 could not be found. [ 2156.338849] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2156.339067] env[68673]: INFO nova.compute.manager [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2156.339312] env[68673]: DEBUG oslo.service.loopingcall [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2156.339533] env[68673]: DEBUG nova.compute.manager [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2156.339627] env[68673]: DEBUG nova.network.neutron [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2156.365370] env[68673]: DEBUG nova.network.neutron [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.377108] env[68673]: INFO nova.compute.manager [-] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] Took 0.04 seconds to deallocate network for instance. 
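Network deallocation above is wrapped in _deallocate_network_with_retries and driven by an oslo.service looping call, so a transient Neutron failure retries instead of failing the terminate outright. A compact, standard-library sketch of that retry-until-success shape (retry and deallocate_network are hypothetical, not the oslo.service API):

    import time

    def retry(func, attempts=3, interval=1.0, retry_on=(Exception,)):
        """Call func until it succeeds or attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retry_on:
                if attempt == attempts:
                    raise
                time.sleep(interval)

    calls = {'n': 0}
    def deallocate_network():
        # hypothetical flaky Neutron call: fails once, then succeeds
        calls['n'] += 1
        if calls['n'] < 2:
            raise ConnectionError("neutron unavailable")
        return "deallocated"

    print(retry(deallocate_network, attempts=3, interval=0))   # deallocated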
[ 2156.460558] env[68673]: DEBUG oslo_concurrency.lockutils [None req-4ae83931-5023-4fec-8659-a494b9113f07 tempest-ImagesNegativeTestJSON-681307171 tempest-ImagesNegativeTestJSON-681307171-project-member] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.462141] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 327.340s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.462354] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3352e87c-38dd-4bfa-937c-644abc30cf76] During sync_power_state the instance has a pending task (deleting). Skip. [ 2156.462528] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "3352e87c-38dd-4bfa-937c-644abc30cf76" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.783853] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.783994] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2158.778989] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2158.779335] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2158.800506] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2158.800671] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2158.800947] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2158.819067] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819243] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819650] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819650] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819650] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819793] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819836] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.819945] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2158.820078] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2158.820526] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.296712] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.297026] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.307781] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Starting instance... 
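The _heal_instance_info_cache pass above rebuilds its candidate list and skips every instance that is still Building, ending with "Didn't find any instances for network info cache update." A sketch of that selection logic; the dict field names (vm_state, uuid) are assumptions for illustration:

```python
def instances_to_heal(instances):
    """Filter out instances whose info cache should not be refreshed yet."""
    to_heal = []
    for inst in instances:
        if inst["vm_state"] == "building":
            print(f"[instance: {inst['uuid']}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal


instances_to_heal([{"uuid": "d79f254c-7c00-4cf8-85ac-6db513533da3",
                    "vm_state": "building"}])
```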
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2161.355856] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.356129] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.357594] env[68673]: INFO nova.compute.claims [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2161.511114] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9107458-f401-4634-99a5-840232102117 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.518590] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17498e4d-a0a1-4da6-b17b-b25245d32417 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.548788] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeaa2c3-9898-4399-8183-70436d5369c5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.555561] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6047f4-162d-4530-8b76-6ddf077f1923 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.568364] env[68673]: DEBUG nova.compute.provider_tree [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2161.576651] env[68673]: DEBUG nova.scheduler.client.report [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2161.589322] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.233s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.589795] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2161.622935] env[68673]: DEBUG nova.compute.utils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2161.625517] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2161.625517] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2161.633956] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2161.693061] env[68673]: DEBUG nova.policy [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca7c5d121938457eb61fcd00b2458283', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e966f4c16a46a28aec0602956be356', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2161.708753] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Start spawning the instance on the hypervisor. 
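The inventory reported to placement above determines how much the scheduler may consume: capacity per resource class is (total - reserved) * allocation_ratio. A worked check against the logged numbers:

```python
# With the inventory above: VCPU (48 - 0) * 4.0 = 192 schedulable vCPUs,
# MEMORY_MB (196590 - 512) * 1.0 = 196078 MB, DISK_GB (400 - 0) * 1.0 = 400 GB.
def schedulable(total, reserved, allocation_ratio):
    return (total - reserved) * allocation_ratio


assert schedulable(48, 0, 4.0) == 192.0
assert schedulable(196590, 512, 1.0) == 196078.0
assert schedulable(400, 0, 1.0) == 400.0
```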
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2161.732458] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2161.732720] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2161.732881] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2161.733116] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2161.733288] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2161.733441] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2161.733659] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2161.733824] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Build topologies for 1 vcpu(s) 1:1:1 
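The nova.virt.hardware entries above search for CPU topologies for 1 vCPU under effectively unbounded limits (65536 sockets/cores/threads), and the following entries confirm exactly one candidate, 1:1:1. A toy enumeration illustrating why; Nova's actual _get_possible_cpu_topologies applies preference ordering on top, which this sketch omits:

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) whose product equals vcpus."""
    return [
        (s, c, t)
        for s in range(1, min(vcpus, max_sockets) + 1)
        for c in range(1, min(vcpus, max_cores) + 1)
        for t in range(1, min(vcpus, max_threads) + 1)
        if s * c * t == vcpus
    ]


print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]
```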
{{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2161.734011] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2161.734206] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2161.734382] env[68673]: DEBUG nova.virt.hardware [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2161.735296] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e8d754-c28a-426b-bca3-0dce31e18095 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.744397] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e019a66c-6817-434d-a791-5e2c2a887de9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.783038] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.783311] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.794844] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.795122] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.795311] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.795493] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2161.796562] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040387ec-f310-4a5d-9c66-331184817226 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.806193] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3338b3dd-bb6d-4ee3-9f41-0718d8af4f31 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.829614] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50348c6-de2a-48d5-b704-350604c35259 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.836075] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd0ed0a-e1f0-4582-8bd3-3afb1edf9943 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.864968] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180898MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2161.865316] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.865316] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.943602] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance d79f254c-7c00-4cf8-85ac-6db513533da3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.943915] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.943915] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944121] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944121] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944242] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944360] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944478] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944590] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
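The nine per-instance allocations listed above, plus the 512 MB host reservation from the inventory logged earlier, reproduce the totals in the "Final resource view" entry that follows. A worked check, assuming all nine instances use the footprint shown (1 vCPU, 128 MB RAM, 1 GB disk each):

```python
# Each of the nine instances above claims {'DISK_GB': 1, 'MEMORY_MB': 128,
# 'VCPU': 1}; the host additionally reserves 512 MB of RAM.
instances = 9
used_ram = instances * 128 + 512   # 1664 MB, as in the final resource view
used_disk = instances * 1          # 9 GB
used_vcpus = instances * 1         # 9
assert (used_ram, used_disk, used_vcpus) == (1664, 9, 9)
```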
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2161.944913] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2161.944913] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2162.067809] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Successfully created port: e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2162.091579] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8d5845-9f38-4ac0-b8b4-bcd0af617281 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.100023] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f3ee3b-2e0f-44b8-b5d4-9927c7fc8d2b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.134555] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73fd3da-9d20-45fa-b1b4-237d13f34e7f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.141577] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9986e5e-459f-4f61-b355-a4a4a98b4819 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.166819] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.176464] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2162.192766] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
2162.192766] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.327s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.645942] env[68673]: DEBUG nova.compute.manager [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Received event network-vif-plugged-e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2162.646225] env[68673]: DEBUG oslo_concurrency.lockutils [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] Acquiring lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.646390] env[68673]: DEBUG oslo_concurrency.lockutils [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] Lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.646556] env[68673]: DEBUG oslo_concurrency.lockutils [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] Lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.646721] env[68673]: DEBUG nova.compute.manager [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] No waiting events found dispatching network-vif-plugged-e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2162.646883] env[68673]: WARNING nova.compute.manager [req-5d831fa8-2bc7-4ce5-bb9b-480b36f0bc70 req-276be4eb-21a5-44f3-8832-a00da633758b service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Received unexpected event network-vif-plugged-e515dcfa-70b4-4cc3-b068-9fe22acd254f for instance with vm_state building and task_state spawning. 
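The external-event handling above pops a per-instance waiter for network-vif-plugged; since nothing registered for the event (the instance is still building, not yet blocked on plug confirmation), it is logged as unexpected and dropped. A sketch of that dispatch pattern; the dict-based registry is an illustration, not Nova's InstanceEvents:

```python
_waiters = {}  # (instance_uuid, event_name) -> waiting callback


def register_waiter(uuid, event, callback):
    _waiters[(uuid, event)] = callback


def pop_instance_event(uuid, event):
    cb = _waiters.pop((uuid, event), None)
    if cb is None:
        # No one was waiting -> the WARNING path seen above.
        print(f"Received unexpected event {event} for instance {uuid}")
    else:
        cb()


pop_instance_event("4000d9e1-c566-4b4f-be56-eacaafa0a0a1",
                   "network-vif-plugged-e515dcfa-70b4-4cc3-b068-9fe22acd254f")
```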
[ 2162.728027] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Successfully updated port: e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2162.738766] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.738964] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquired lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.739146] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2162.797244] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2162.998316] env[68673]: DEBUG nova.network.neutron [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Updating instance_info_cache with network_info: [{"id": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "address": "fa:16:3e:da:00:ef", "network": {"id": "fe13cc62-1c69-44f3-8c6b-0babd9942dc9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-103727199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e966f4c16a46a28aec0602956be356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape515dcfa-70", "ovs_interfaceid": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.012412] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Releasing lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.012412] env[68673]: DEBUG nova.compute.manager [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Instance network_info: |[{"id": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "address": "fa:16:3e:da:00:ef", "network": {"id": "fe13cc62-1c69-44f3-8c6b-0babd9942dc9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-103727199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e966f4c16a46a28aec0602956be356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape515dcfa-70", "ovs_interfaceid": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2163.012641] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:00:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e515dcfa-70b4-4cc3-b068-9fe22acd254f', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2163.019453] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Creating folder: Project (39e966f4c16a46a28aec0602956be356). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2163.019955] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3796ef41-a51c-4c7b-85c8-ca190b07ec40 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.030360] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Created folder: Project (39e966f4c16a46a28aec0602956be356) in parent group-v685311. [ 2163.030541] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Creating folder: Instances. Parent ref: group-v685418. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2163.030755] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8298c49f-d32b-492f-8b09-45e256f30244 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.039346] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Created folder: Instances in parent group-v685418. [ 2163.039573] env[68673]: DEBUG oslo.service.loopingcall [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.039744] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2163.039928] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4069a14-b961-4c54-a725-d8559b13ec93 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.058497] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2163.058497] env[68673]: value = "task-3433628" [ 2163.058497] env[68673]: _type = "Task" [ 2163.058497] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.066739] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433628, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.192221] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2163.568303] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433628, 'name': CreateVM_Task, 'duration_secs': 0.291977} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.568553] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2163.569284] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.569526] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.569888] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2163.570194] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d08d816b-e7d1-478e-84f0-8b60c3af0b7b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.574788] env[68673]: DEBUG oslo_vmware.api [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f 
tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Waiting for the task: (returnval){ [ 2163.574788] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5274b666-6ca5-18d6-c457-620e9a7f2254" [ 2163.574788] env[68673]: _type = "Task" [ 2163.574788] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.582237] env[68673]: DEBUG oslo_vmware.api [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5274b666-6ca5-18d6-c457-620e9a7f2254, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.085248] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.085589] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2164.085713] env[68673]: DEBUG oslo_concurrency.lockutils [None req-130cf024-3da6-4f0f-a8cd-29ac39f98b0f tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.675942] env[68673]: DEBUG nova.compute.manager [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Received event network-changed-e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2164.676159] env[68673]: DEBUG nova.compute.manager [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Refreshing instance network info cache due to event network-changed-e515dcfa-70b4-4cc3-b068-9fe22acd254f. 
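The CreateVM_Task and SearchDatastore_Task entries above show the same shape each time: submit a vCenter task, then poll its progress until it completes (reporting duration_secs on success). A minimal stand-in for that wait loop; oslo.vmware drives the real one with a looping call and typed fault translation, which this sketch omits:

```python
import time


def wait_for_task(task_id, get_state, interval=0.5):
    """Poll get_state(task_id) -> (state, progress) until terminal."""
    start = time.monotonic()
    while True:
        state, progress = get_state(task_id)
        print(f"Task {task_id!r} ({state}) progress is {progress}%.")
        if state == "success":
            return time.monotonic() - start   # analogous to duration_secs
        if state == "error":
            raise RuntimeError(f"task {task_id} failed")
        time.sleep(interval)


# Toy driver: succeeds on the third poll.
states = iter([("running", 0), ("running", 50), ("success", 100)])
print(wait_for_task("task-3433628", lambda t: next(states), interval=0.01))
```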
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2164.676370] env[68673]: DEBUG oslo_concurrency.lockutils [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] Acquiring lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.676512] env[68673]: DEBUG oslo_concurrency.lockutils [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] Acquired lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.676669] env[68673]: DEBUG nova.network.neutron [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Refreshing network info cache for port e515dcfa-70b4-4cc3-b068-9fe22acd254f {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2165.087989] env[68673]: DEBUG nova.network.neutron [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Updated VIF entry in instance network info cache for port e515dcfa-70b4-4cc3-b068-9fe22acd254f. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2165.088402] env[68673]: DEBUG nova.network.neutron [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Updating instance_info_cache with network_info: [{"id": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "address": "fa:16:3e:da:00:ef", "network": {"id": "fe13cc62-1c69-44f3-8c6b-0babd9942dc9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-103727199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e966f4c16a46a28aec0602956be356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape515dcfa-70", "ovs_interfaceid": "e515dcfa-70b4-4cc3-b068-9fe22acd254f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.097364] env[68673]: DEBUG oslo_concurrency.lockutils [req-c92831c2-4027-4061-bc46-441dc199d8f5 req-a26b25ec-bf0b-4b22-b758-0ffbb12546be service nova] Releasing lock "refresh_cache-4000d9e1-c566-4b4f-be56-eacaafa0a0a1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.420618] env[68673]: DEBUG oslo_concurrency.lockutils [None req-62a1a71d-ab32-4d3c-b03b-e2dd238bf54d tempest-AttachInterfacesTestJSON-251783821 
tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "07f2fc85-14df-4702-bf49-67d8ce8e9526" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.783324] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.194345] env[68673]: WARNING oslo_vmware.rw_handles [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2202.194345] env[68673]: ERROR oslo_vmware.rw_handles [ 2202.195060] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2202.196918] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2202.197238] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Copying Virtual Disk [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/85bfb145-4461-448c-8578-2b263b1a4624/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2202.197585] env[68673]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41e81ad8-3ab5-4998-9ce1-8553db559716 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.206404] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2202.206404] env[68673]: value = "task-3433629" [ 2202.206404] env[68673]: _type = "Task" [ 2202.206404] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.214355] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433629, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.717273] env[68673]: DEBUG oslo_vmware.exceptions [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2202.717581] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.718161] env[68673]: ERROR nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2202.718161] env[68673]: Faults: ['InvalidArgument'] [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Traceback (most recent call last): [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] yield resources [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self.driver.spawn(context, instance, image_meta, [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2202.718161] env[68673]: 
ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self._fetch_image_if_missing(context, vi) [ 2202.718161] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] image_cache(vi, tmp_image_ds_loc) [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] vm_util.copy_virtual_disk( [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] session._wait_for_task(vmdk_copy_task) [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return self.wait_for_task(task_ref) [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return evt.wait() [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] result = hub.switch() [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2202.718686] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return self.greenlet.switch() [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self.f(*self.args, **self.kw) [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] raise exceptions.translate_fault(task_info.error) [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Faults: ['InvalidArgument'] [ 2202.719216] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] [ 2202.719216] 
env[68673]: INFO nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Terminating instance [ 2202.720150] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.720407] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2202.720646] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84413c81-37af-4e1f-b197-28eb1279d8bd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.723612] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2202.723809] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2202.724538] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696d0f01-8551-4d53-9520-edec1770a5b9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.731041] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2202.731250] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e34cf5e-b221-44f2-af4f-bac7e00354fd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.733405] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2202.733577] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2202.734647] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0e440f1-41cc-4dda-8460-51474d53ab01 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.739703] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 2202.739703] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52719c07-f549-f22a-43a4-e7cd55c62c86" [ 2202.739703] env[68673]: _type = "Task" [ 2202.739703] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.746796] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52719c07-f549-f22a-43a4-e7cd55c62c86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.802947] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2202.803188] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2202.803386] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleting the datastore file [datastore1] d79f254c-7c00-4cf8-85ac-6db513533da3 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2202.803631] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a68c0d14-1ea7-4f0b-8fb1-11942d7cfa4d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.809380] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2202.809380] env[68673]: value = "task-3433631" [ 2202.809380] env[68673]: _type = "Task" [ 2202.809380] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.817129] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.250664] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2203.250940] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2203.251174] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90e620c5-6354-4e22-ba9f-8e4b488eb38e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.263446] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2203.263634] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Fetch image to [datastore1] vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2203.263818] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2203.264537] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0745e6-9645-43fb-b272-54870b8638f8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.271011] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1070e9a5-c004-4135-a5f6-6717bca073ef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.281246] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8b827a-868c-4a3e-a46f-a07e401b37b1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.314060] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8c07fc79-9626-4558-b891-d82664e0b30f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.320981] env[68673]: DEBUG oslo_vmware.api [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073239} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.322561] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2203.322752] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2203.322942] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2203.323145] env[68673]: INFO nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2203.324905] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7790f720-b331-445b-90c1-f94de3c68ddd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.326736] env[68673]: DEBUG nova.compute.claims [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2203.326912] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2203.327147] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2203.348541] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2203.406220] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2203.468850] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2203.468850] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2203.542660] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb575141-6c3b-4a40-9c0d-4d77c5e5c1cb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.550199] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a42bf46-91b3-437e-ab45-2fb495230dc7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.580604] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4d2135-d4bc-4c55-9019-f5d12a8dcdae {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.587027] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13759897-a7f1-48b1-9d8a-7ae6b219c150 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.599366] env[68673]: DEBUG nova.compute.provider_tree [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2203.607487] env[68673]: DEBUG nova.scheduler.client.report [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2203.620723] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.294s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.621237] env[68673]: ERROR nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2203.621237] env[68673]: Faults: ['InvalidArgument'] [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Traceback (most recent call last): [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: 
d79f254c-7c00-4cf8-85ac-6db513533da3] self.driver.spawn(context, instance, image_meta, [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self._fetch_image_if_missing(context, vi) [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] image_cache(vi, tmp_image_ds_loc) [ 2203.621237] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] vm_util.copy_virtual_disk( [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] session._wait_for_task(vmdk_copy_task) [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return self.wait_for_task(task_ref) [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return evt.wait() [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] result = hub.switch() [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] return self.greenlet.switch() [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2203.621708] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] self.f(*self.args, **self.kw) [ 2203.622193] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2203.622193] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] raise exceptions.translate_fault(task_info.error) [ 2203.622193] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2203.622193] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Faults: ['InvalidArgument'] [ 2203.622193] env[68673]: ERROR nova.compute.manager [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] [ 2203.622193] env[68673]: DEBUG nova.compute.utils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2203.623260] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Build of instance d79f254c-7c00-4cf8-85ac-6db513533da3 was re-scheduled: A specified parameter was not correct: fileType [ 2203.623260] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2203.623632] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2203.623805] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2203.623978] env[68673]: DEBUG nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2203.624159] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2203.870609] env[68673]: DEBUG nova.network.neutron [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2203.890302] env[68673]: INFO nova.compute.manager [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Took 0.27 seconds to deallocate network for instance. [ 2203.979405] env[68673]: INFO nova.scheduler.client.report [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted allocations for instance d79f254c-7c00-4cf8-85ac-6db513533da3 [ 2203.999841] env[68673]: DEBUG oslo_concurrency.lockutils [None req-efa5807b-b6c3-47a5-9985-fa2e24320a1f tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 658.469s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.000133] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 462.739s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.000352] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.000565] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [
2204.000731] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.002696] env[68673]: INFO nova.compute.manager [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Terminating instance [ 2204.004420] env[68673]: DEBUG nova.compute.manager [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2204.004610] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2204.005087] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bd02746-c8d5-4018-981c-0fe219699907 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.014490] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c067cc-7711-4f62-9a19-30ca0587b4e8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.042403] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d79f254c-7c00-4cf8-85ac-6db513533da3 could not be found. [ 2204.042606] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2204.042779] env[68673]: INFO nova.compute.manager [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2204.043028] env[68673]: DEBUG oslo.service.loopingcall [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2204.043244] env[68673]: DEBUG nova.compute.manager [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2204.043339] env[68673]: DEBUG nova.network.neutron [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2204.065872] env[68673]: DEBUG nova.network.neutron [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.073662] env[68673]: INFO nova.compute.manager [-] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] Took 0.03 seconds to deallocate network for instance. [ 2204.154274] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e0c46f4b-b738-4ce0-975e-e3e3a97f6b87 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.154s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.155077] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 375.033s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.155278] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: d79f254c-7c00-4cf8-85ac-6db513533da3] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2204.155452] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "d79f254c-7c00-4cf8-85ac-6db513533da3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.624464] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "3277e704-c970-4482-a812-f02e297f99d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.625257] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "3277e704-c970-4482-a812-f02e297f99d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.639047] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2214.666931] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "643c6d20-b3b9-440d-82f2-7c09a609717d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.667193] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "643c6d20-b3b9-440d-82f2-7c09a609717d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.682270] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Starting instance...
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2214.736353] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.736652] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.741103] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.741367] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.742841] env[68673]: INFO nova.compute.claims [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2214.761597] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.944914] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efbec4b-bca9-4c0c-9f0e-971dd95400cd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.952927] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231e7363-5e84-4fab-b1b6-5d7332f8fdd5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.985609] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7eb79d-2d3f-4100-bf2a-6a332883b5b4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.993351] env[68673]: DEBUG oslo_vmware.service [-]
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b657b50-8b47-441c-a7c3-07935f9ecb0f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.007206] env[68673]: DEBUG nova.compute.provider_tree [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2215.018360] env[68673]: DEBUG nova.scheduler.client.report [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2215.034033] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.293s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.034534] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2215.036858] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.275s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.038214] env[68673]: INFO nova.compute.claims [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2215.073195] env[68673]: DEBUG nova.compute.utils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2215.076103] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2215.076270] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2215.085303] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2215.148860] env[68673]: DEBUG nova.policy [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37d5217b19814e26a011ef8dc48792b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452e234cb78e44f5b2c1fe13e8b207b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2215.153746] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2215.184099] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2215.184380] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2215.184541] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2215.184725] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2215.184873] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2215.185102] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2215.185425] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2215.185521] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2215.185690] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2215.185938] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2215.186126] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2215.186967] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16981d8a-eb25-4758-8f48-c84dfe3662cd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.197847] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28a30c8-f839-4f05-b34a-ee485530eb66 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.248941] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a924d4db-01e1-4f77-8f54-08bf77e0c906 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.256494] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a536a81d-e018-40a9-aa03-34de0c46ac8d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.287740] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0475a5e8-3e27-4676-9aa8-7395b070188a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.295185] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2046abd3-bbf6-4fe3-98fb-6251e60955e9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.309524] env[68673]: DEBUG nova.compute.provider_tree [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2215.318590] env[68673]: DEBUG nova.scheduler.client.report [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2215.331732] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.332191] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2215.367290] env[68673]: DEBUG nova.compute.utils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2215.368732] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2215.368895] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2215.383635] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Start building block device mappings for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2215.421658] env[68673]: DEBUG nova.policy [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37d5217b19814e26a011ef8dc48792b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452e234cb78e44f5b2c1fe13e8b207b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2215.475943] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2215.481962] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Successfully created port: 18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2215.504639] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2215.504878] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2215.505040] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2215.505222] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 
tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2215.505363] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2215.505503] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2215.505717] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2215.505871] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2215.506045] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2215.506372] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2215.506372] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2215.507341] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f237de7-4166-44fd-a1dd-be82ae9cc765 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.516081] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0cbfa4-36c8-4c56-9b6a-573c04022478 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.807477] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Successfully created port: 25a56a44-0674-4327-a5ba-d265bf1a3d95 
{{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2216.145932] env[68673]: DEBUG nova.compute.manager [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Received event network-vif-plugged-18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2216.146182] env[68673]: DEBUG oslo_concurrency.lockutils [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] Acquiring lock "3277e704-c970-4482-a812-f02e297f99d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.146398] env[68673]: DEBUG oslo_concurrency.lockutils [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] Lock "3277e704-c970-4482-a812-f02e297f99d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.146566] env[68673]: DEBUG oslo_concurrency.lockutils [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] Lock "3277e704-c970-4482-a812-f02e297f99d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.146736] env[68673]: DEBUG nova.compute.manager [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] No waiting events found dispatching network-vif-plugged-18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2216.146900] env[68673]: WARNING nova.compute.manager [req-7f1f312b-9b64-4327-8297-88b7ebdfe65b req-308021a0-dfaa-4d0c-a816-3a885d44b29a service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Received unexpected event network-vif-plugged-18270d0c-02c9-426c-8584-c6d71d03a5e9 for instance with vm_state building and task_state spawning. 
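[annotation] The two entries above trace Nova's external-event plumbing: Neutron reports network-vif-plugged for the new port, nova-compute takes the per-instance "<uuid>-events" lock, tries to pop a registered waiter for that event, finds none (the instance is still building, so nothing has called prepare-and-wait yet), and logs the WARNING about an unexpected event. Below is a minimal, hypothetical sketch of that waiter-registry pattern; it mimics the behaviour visible in the log and is not Nova's actual InstanceEvents implementation.

```python
# Hypothetical sketch of a per-instance event registry: an external
# "network-vif-plugged-<port>" event either wakes a registered waiter
# or, when nothing is waiting yet, is reported as unexpected.
import threading


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # {instance_uuid: {event_name: Event}}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before it is triggered externally."""
        with self._lock:
            ev = threading.Event()
            self._waiters.setdefault(instance_uuid, {})[event_name] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Return the waiter for this event, or None if nothing waits."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


events = InstanceEvents()
waiter = events.pop_instance_event(
    "3277e704-c970-4482-a812-f02e297f99d1",
    "network-vif-plugged-18270d0c-02c9-426c-8584-c6d71d03a5e9")
if waiter is None:
    # Matches the "No waiting events found ... Received unexpected event"
    # WARNING above: the instance is still spawning, so no task has
    # registered to wait for the plug event yet.
    print("unexpected event, nothing waiting")
else:
    waiter.set()
```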
[ 2216.211071] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Successfully updated port: 18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2216.221468] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.221618] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.221770] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2216.272204] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2216.375453] env[68673]: DEBUG nova.compute.manager [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Received event network-vif-plugged-25a56a44-0674-4327-a5ba-d265bf1a3d95 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2216.375453] env[68673]: DEBUG oslo_concurrency.lockutils [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] Acquiring lock "643c6d20-b3b9-440d-82f2-7c09a609717d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.375453] env[68673]: DEBUG oslo_concurrency.lockutils [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] Lock "643c6d20-b3b9-440d-82f2-7c09a609717d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.375453] env[68673]: DEBUG oslo_concurrency.lockutils [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] Lock "643c6d20-b3b9-440d-82f2-7c09a609717d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.375676] env[68673]: DEBUG nova.compute.manager [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] No waiting events found dispatching network-vif-plugged-25a56a44-0674-4327-a5ba-d265bf1a3d95 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2216.375676] env[68673]: WARNING nova.compute.manager [req-a5b3df41-8ae4-4c08-8988-0debac47a066 req-9a2f0248-a0ea-43f1-acab-9f7d3de6ebfc service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Received unexpected event network-vif-plugged-25a56a44-0674-4327-a5ba-d265bf1a3d95 for instance with vm_state building and task_state spawning. 
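[annotation] The entries that follow show the same pattern for the second instance: after a port update, the network-info cache rebuild is serialized per instance under a named "refresh_cache-<uuid>" lock, with the Acquiring/Acquired/Releasing DEBUG lines coming from oslo.concurrency. A minimal sketch of that locking pattern, assuming oslo.concurrency is installed; the refresh_network_cache() helper is hypothetical glue, not Nova code.

```python
# Minimal sketch of the "refresh_cache-<uuid>" serialization seen below.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, fetch_nw_info, cache):
    lock_name = "refresh_cache-%s" % instance_uuid
    # lockutils.lock() emits the "Acquiring lock"/"Acquired lock"/
    # "Releasing lock" DEBUG entries visible in the surrounding log lines.
    with lockutils.lock(lock_name):
        nw_info = fetch_nw_info(instance_uuid)   # e.g. a Neutron port query
        cache[instance_uuid] = nw_info           # update_instance_cache_with_nw_info
        return nw_info
```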
[ 2216.440734] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Successfully updated port: 25a56a44-0674-4327-a5ba-d265bf1a3d95 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2216.441621] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Updating instance_info_cache with network_info: [{"id": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "address": "fa:16:3e:d6:89:7f", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18270d0c-02", "ovs_interfaceid": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.449729] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.450061] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.450364] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2216.453548] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.453548] 
env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Instance network_info: |[{"id": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "address": "fa:16:3e:d6:89:7f", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18270d0c-02", "ovs_interfaceid": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2216.454316] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:89:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18270d0c-02c9-426c-8584-c6d71d03a5e9', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2216.460997] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Creating folder: Project (452e234cb78e44f5b2c1fe13e8b207b9). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2216.461898] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a6a23fa-790a-491b-8998-105e552eb694 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.474606] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Created folder: Project (452e234cb78e44f5b2c1fe13e8b207b9) in parent group-v685311. [ 2216.474606] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Creating folder: Instances. Parent ref: group-v685421. 
{{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2216.474834] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb7411da-4d58-4b76-aa5e-81fec179601f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.484166] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Created folder: Instances in parent group-v685421. [ 2216.484400] env[68673]: DEBUG oslo.service.loopingcall [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.484578] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2216.484772] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c2c53ca-f7b3-4ffa-ac20-6e79daf040ef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.499685] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2216.506699] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2216.506699] env[68673]: value = "task-3433634" [ 2216.506699] env[68673]: _type = "Task" [ 2216.506699] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.514163] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433634, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.699285] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Updating instance_info_cache with network_info: [{"id": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "address": "fa:16:3e:7e:9e:6a", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a56a44-06", "ovs_interfaceid": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.713203] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.713478] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Instance network_info: |[{"id": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "address": "fa:16:3e:7e:9e:6a", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a56a44-06", "ovs_interfaceid": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2216.713890] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:9e:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25a56a44-0674-4327-a5ba-d265bf1a3d95', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2216.722290] env[68673]: DEBUG oslo.service.loopingcall [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.722845] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2216.723095] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91695d37-779c-4778-9ddc-30e0c277ab75 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.744182] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2216.744182] env[68673]: value = "task-3433635" [ 2216.744182] env[68673]: _type = "Task" [ 2216.744182] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.752478] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433635, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.783614] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2217.016268] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433634, 'name': CreateVM_Task, 'duration_secs': 0.321796} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.016617] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2217.017219] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.017425] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.017760] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2217.018041] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e5132af-308b-42d4-9c59-79ed808aeb28 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.022254] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for the task: (returnval){ [ 2217.022254] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52f955a7-b9f0-45e2-3547-eaeadccf052e" [ 2217.022254] env[68673]: _type = "Task" [ 2217.022254] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.030205] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52f955a7-b9f0-45e2-3547-eaeadccf052e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.254156] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433635, 'name': CreateVM_Task, 'duration_secs': 0.281509} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.254341] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2217.254963] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.532629] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.532884] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2217.533100] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.533320] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.533635] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2217.533880] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7881cfe-9dd4-479e-8cb8-4ad02b4d9a30 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.538244] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for the task: (returnval){ [ 2217.538244] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]523dbd72-716a-0843-bdd1-ae4c772f0961" [ 2217.538244] env[68673]: _type = "Task" [ 2217.538244] 
env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.545620] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]523dbd72-716a-0843-bdd1-ae4c772f0961, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.050054] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.050054] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2218.050435] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.174612] env[68673]: DEBUG nova.compute.manager [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Received event network-changed-18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2218.174860] env[68673]: DEBUG nova.compute.manager [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Refreshing instance network info cache due to event network-changed-18270d0c-02c9-426c-8584-c6d71d03a5e9. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2218.175117] env[68673]: DEBUG oslo_concurrency.lockutils [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] Acquiring lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.175270] env[68673]: DEBUG oslo_concurrency.lockutils [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] Acquired lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.175436] env[68673]: DEBUG nova.network.neutron [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Refreshing network info cache for port 18270d0c-02c9-426c-8584-c6d71d03a5e9 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2218.403394] env[68673]: DEBUG nova.compute.manager [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Received event network-changed-25a56a44-0674-4327-a5ba-d265bf1a3d95 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2218.403596] env[68673]: DEBUG nova.compute.manager [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Refreshing instance network info cache due to event network-changed-25a56a44-0674-4327-a5ba-d265bf1a3d95. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2218.403805] env[68673]: DEBUG oslo_concurrency.lockutils [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] Acquiring lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.403945] env[68673]: DEBUG oslo_concurrency.lockutils [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] Acquired lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.404202] env[68673]: DEBUG nova.network.neutron [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Refreshing network info cache for port 25a56a44-0674-4327-a5ba-d265bf1a3d95 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2218.420657] env[68673]: DEBUG nova.network.neutron [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Updated VIF entry in instance network info cache for port 18270d0c-02c9-426c-8584-c6d71d03a5e9. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2218.420979] env[68673]: DEBUG nova.network.neutron [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Updating instance_info_cache with network_info: [{"id": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "address": "fa:16:3e:d6:89:7f", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18270d0c-02", "ovs_interfaceid": "18270d0c-02c9-426c-8584-c6d71d03a5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.430455] env[68673]: DEBUG oslo_concurrency.lockutils [req-7429b922-7b6f-45da-b39f-216b5f0d447c req-963d20bb-dac9-43fb-86b1-98e3168d0e2c service nova] Releasing lock "refresh_cache-3277e704-c970-4482-a812-f02e297f99d1" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.651121] env[68673]: DEBUG nova.network.neutron [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Updated VIF entry in instance network info cache for port 25a56a44-0674-4327-a5ba-d265bf1a3d95. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2218.651502] env[68673]: DEBUG nova.network.neutron [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Updating instance_info_cache with network_info: [{"id": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "address": "fa:16:3e:7e:9e:6a", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a56a44-06", "ovs_interfaceid": "25a56a44-0674-4327-a5ba-d265bf1a3d95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.660768] env[68673]: DEBUG oslo_concurrency.lockutils [req-c3404af4-2a7e-49fd-9037-de2f957ee0cf req-5b6ab542-c85b-42f4-859b-79451ce6c784 service nova] Releasing lock "refresh_cache-643c6d20-b3b9-440d-82f2-7c09a609717d" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.783559] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2218.783803] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2218.783951] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2219.779111] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.782802] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.782999] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2219.783146] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2219.804374] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.804522] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.804649] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.804774] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.804894] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805023] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805149] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805268] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805385] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805500] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2219.805616] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2221.783017] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2221.794214] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.794421] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.794585] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.794743] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2221.795852] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cf51c3-868e-4cb5-9a1f-53c62c294f2f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.804589] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a6cdd5-215c-4c76-a62e-0d1b9368e0cf {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.818816] env[68673]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6964e123-16bc-4d9a-becd-2fb635ad0be3 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.824751] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6edb205-dca9-4faf-8c5c-90e2d622bf92 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.854969] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180854MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2221.855159] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.855321] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.926045] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926215] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926343] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926467] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926589] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926710] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926826] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.926943] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.927070] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3277e704-c970-4482-a812-f02e297f99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.927187] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 643c6d20-b3b9-440d-82f2-7c09a609717d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2221.939545] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f7b5663b-1c63-47d9-85dc-59a47a82d5b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2221.939782] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2221.939930] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2222.072610] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2167fc04-2111-4703-a8ce-cda33c480dc2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.080316] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afd37ef-1a0b-4716-9685-e6231fe81829 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.109588] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c0b7f5-3aab-410c-90f6-b4d7382ad4cb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.116280] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f825986-64f9-48c8-8be7-68df67e66a9a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.128895] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2222.137477] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2222.152184] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2222.152364] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.297s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.153147] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.153503] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2226.783522] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2253.017911] env[68673]: WARNING oslo_vmware.rw_handles [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2253.017911] env[68673]: ERROR oslo_vmware.rw_handles [ 2253.018885] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2253.020303] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2253.020544] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/e79c1da1-ae39-4425-957f-0d58568739da/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2253.020840] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc95c88e-69d0-472c-9c9f-4660f46f5a52 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.028391] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 2253.028391] env[68673]: value = "task-3433636" [ 2253.028391] env[68673]: _type = "Task" [ 2253.028391] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.036174] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.539031] env[68673]: DEBUG oslo_vmware.exceptions [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2253.539355] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2253.539932] env[68673]: ERROR nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2253.539932] env[68673]: Faults: ['InvalidArgument'] [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Traceback (most recent call last): [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] yield resources [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self.driver.spawn(context, instance, image_meta, [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] 
File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self._fetch_image_if_missing(context, vi) [ 2253.539932] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] image_cache(vi, tmp_image_ds_loc) [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] vm_util.copy_virtual_disk( [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] session._wait_for_task(vmdk_copy_task) [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return self.wait_for_task(task_ref) [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return evt.wait() [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] result = hub.switch() [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2253.540593] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return self.greenlet.switch() [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self.f(*self.args, **self.kw) [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] raise 
exceptions.translate_fault(task_info.error) [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Faults: ['InvalidArgument'] [ 2253.541217] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] [ 2253.541217] env[68673]: INFO nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Terminating instance [ 2253.541852] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2253.542073] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2253.542315] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eeeee5c3-c81d-415c-8751-4f77c899546c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.544661] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2253.544846] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2253.545574] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135692fe-77fc-4609-907e-ad27acb185af {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.551973] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2253.552454] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b08d57e6-ce23-4d98-94f4-d22a689c2792 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.554288] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2253.554535] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2253.555364] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c096bfff-cc1b-45c6-b12f-dbada6a620ab {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.560056] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for the task: (returnval){ [ 2253.560056] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52850a38-f108-9e16-cafc-a1eb2a3fcde7" [ 2253.560056] env[68673]: _type = "Task" [ 2253.560056] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.573528] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52850a38-f108-9e16-cafc-a1eb2a3fcde7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.070478] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2254.070802] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Creating directory with path [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2254.070999] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afc19075-ce3f-43cd-856f-545167fef497 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.089967] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Created directory with path [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2254.090173] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Fetch image to [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2254.090344] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2254.091050] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c71ab1-a589-4ede-bb0d-d976e7bf3691 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.097342] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a873c00-a5f8-4b53-b16b-c4e60af6342a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.106154] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d59ba7-cecd-4184-9e61-b86253af58c0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.136580] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2de5e196-62dd-42d7-8541-091e0514d6a4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.141653] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9a332121-375a-4a4f-ac8d-aee7073f49cb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.168635] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2254.218592] env[68673]: DEBUG oslo_vmware.rw_handles [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2254.278352] env[68673]: DEBUG oslo_vmware.rw_handles [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2254.278568] env[68673]: DEBUG oslo_vmware.rw_handles [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2255.444180] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2255.444572] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2255.444664] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleting the datastore file [datastore1] 3e1b47d8-e829-416d-baca-b15e3d0d358d {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2255.444861] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5afb9e4-dff2-4481-9ca9-880f090c4f08 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.450854] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){ [ 2255.450854] env[68673]: value = "task-3433638" [ 2255.450854] env[68673]: _type = "Task" [ 2255.450854] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.458575] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.960858] env[68673]: DEBUG oslo_vmware.api [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': task-3433638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072877} completed successfully. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2255.961118] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2255.961303] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2255.961475] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2255.961656] env[68673]: INFO nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Took 2.42 seconds to destroy the instance on the hypervisor. [ 2255.964126] env[68673]: DEBUG nova.compute.claims [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2255.964295] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.964522] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.133220] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d34fd83-fdad-4372-a9c1-5c47d6e8d0de {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.140897] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5f5be8-e86c-4b69-9b1b-712f0cca3d59 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.169852] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd674269-d194-4af1-918a-989a5a9ca31c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.176375] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c24098f-f596-4c13-be49-3d26d0daaab5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.190104] env[68673]: DEBUG nova.compute.provider_tree [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.198693] env[68673]: DEBUG nova.scheduler.client.report [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2256.212805] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.248s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.213353] env[68673]: ERROR nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2256.213353] env[68673]: Faults: ['InvalidArgument'] [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Traceback (most recent call last): [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self.driver.spawn(context, instance, image_meta, [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self._fetch_image_if_missing(context, vi) [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] image_cache(vi, tmp_image_ds_loc) [ 2256.213353] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] vm_util.copy_virtual_disk( [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] session._wait_for_task(vmdk_copy_task) [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return self.wait_for_task(task_ref) [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return evt.wait() [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] result = hub.switch() [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] return self.greenlet.switch() [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2256.213845] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] self.f(*self.args, **self.kw) [ 2256.214323] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2256.214323] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] raise exceptions.translate_fault(task_info.error) [ 2256.214323] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2256.214323] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Faults: ['InvalidArgument'] [ 2256.214323] env[68673]: ERROR nova.compute.manager [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] [ 2256.214323] env[68673]: DEBUG nova.compute.utils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] VimFaultException {{(pid=68673) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 2256.216099] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Build of instance 3e1b47d8-e829-416d-baca-b15e3d0d358d was re-scheduled: A specified parameter was not correct: fileType [ 2256.216099] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2256.216468] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2256.216685] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2256.216894] env[68673]: DEBUG nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2256.217082] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2256.488448] env[68673]: DEBUG nova.network.neutron [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2256.498355] env[68673]: INFO nova.compute.manager [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Took 0.28 seconds to deallocate network for instance. 
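Both tracebacks above end at the same point: oslo.vmware's task poller observes that the server-side CopyVirtualDisk_Task finished in an error state and translates the vSphere fault into a VimFaultException, which then propagates up through driver.spawn(). A minimal sketch of that polling pattern, assuming an already-established VMwareAPISession; the names copy_sparse_disk, dc_ref, src and dst are illustrative placeholders, not Nova's actual helpers:

    from oslo_vmware import exceptions as vexc

    def copy_sparse_disk(session, dc_ref, src, dst):
        # Start the asynchronous server-side copy task on the
        # virtualDiskManager managed object.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src, sourceDatacenter=dc_ref,
            destName=dst, destDatacenter=dc_ref)
        try:
            # wait_for_task() polls the task object on an interval and, if
            # the task ends in an error state, raises the translated fault.
            session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list carries the vSphere fault names, e.g.
            # ['InvalidArgument'], matching the "Faults:" lines above.
            raise

Because the fault only surfaces when a poll observes the failed task, the copy appears to start cleanly (the task is logged at progress 0%) before the build is aborted and rescheduled.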
[ 2256.590953] env[68673]: INFO nova.scheduler.client.report [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Deleted allocations for instance 3e1b47d8-e829-416d-baca-b15e3d0d358d [ 2256.614048] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7dd32c95-e05b-4dfd-a411-2fff381b66af tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 679.554s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.615554] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 483.816s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.616976] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquiring lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.616976] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.616976] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.618638] env[68673]: INFO nova.compute.manager [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Terminating instance [ 2256.620586] env[68673]: DEBUG nova.compute.manager [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
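The Acquiring/acquired/released triplets above (lockutils.py:402, 407 and 421) come from oslo.concurrency's synchronized decorator, which serializes every caller sharing a lock name. A minimal sketch of the pattern with a placeholder body:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_resources():
        # While one green thread holds 'compute_resources', every other
        # decorated caller blocks; the "waited N s" and "held N s" figures
        # in the log measure exactly that queueing.
        pass

That queueing is why do_terminate_instance above reports waiting 483.816s: the terminate request sat on the per-instance lock until _locked_do_build_and_run_instance released it after holding it for 679.554s.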
[ 2256.620778] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2256.621266] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6314557-5827-44d8-b4d4-724060ee0993 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.625429] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2256.631993] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6558959-6302-4253-aaa3-a60e6df8985d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.661766] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3e1b47d8-e829-416d-baca-b15e3d0d358d could not be found. [ 2256.661766] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2256.662031] env[68673]: INFO nova.compute.manager [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2256.662325] env[68673]: DEBUG oslo.service.loopingcall [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
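The "Waiting for function ... to return" line is oslo.service's loopingcall machinery, which Nova uses here to retry network deallocation until it succeeds. A minimal sketch of the general mechanism using the fixed-interval variant (not necessarily the exact variant Nova picks for this path); _deallocate_once is a placeholder:

    from oslo_service import loopingcall

    def _deallocate_once():
        # One attempt per tick. Raising LoopingCallDone stops the loop and
        # hands retvalue back to wait(); returning normally means the loop
        # runs again after the interval.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
    result = timer.start(interval=1.0).wait()  # blocks the caller until done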
[ 2256.664465] env[68673]: DEBUG nova.compute.manager [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2256.664575] env[68673]: DEBUG nova.network.neutron [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2256.678146] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.678397] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.679926] env[68673]: INFO nova.compute.claims [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2256.690913] env[68673]: DEBUG nova.network.neutron [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2256.707521] env[68673]: INFO nova.compute.manager [-] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] Took 0.04 seconds to deallocate network for instance. [ 2256.799186] env[68673]: DEBUG oslo_concurrency.lockutils [None req-bf76bfe8-e7ca-4cc8-b991-89f435d8005b tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.184s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.800278] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 427.678s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.800469] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3e1b47d8-e829-416d-baca-b15e3d0d358d] During sync_power_state the instance has a pending task (deleting). Skip.
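The inventory dicts placement reports above and again just below define schedulable capacity per resource class as (total - reserved) * allocation_ratio, with min_unit, max_unit and step_size constraining each individual allocation. A quick check of the logged numbers (plain illustrative Python, not Nova code):

    # Inventory as logged for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With only 10 of an effective 192 VCPUs allocated, the m1.nano claim for f7b5663b-1c63-47d9-85dc-59a47a82d5b9 succeeds immediately, as the "Claim successful" line above shows.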
[ 2256.800677] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "3e1b47d8-e829-416d-baca-b15e3d0d358d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.853279] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d125f2-d5c7-44f6-82e2-95b9d8e3677c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.862194] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cd2794-65c8-4c00-9235-6009a506c033 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.894258] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1193dbd4-e05a-4f83-93e4-3a0afb528f8d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.901159] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e26947b-b4a5-43de-a686-e00afb31b597 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.914502] env[68673]: DEBUG nova.compute.provider_tree [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.923186] env[68673]: DEBUG nova.scheduler.client.report [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2256.938547] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.260s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.939084] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Start building networks asynchronously for instance. 
{{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2256.970751] env[68673]: DEBUG nova.compute.utils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2256.972225] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2256.972394] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2256.980682] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2257.037851] env[68673]: DEBUG nova.policy [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37d5217b19814e26a011ef8dc48792b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452e234cb78e44f5b2c1fe13e8b207b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2257.044110] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2257.068023] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2257.068204] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2257.068369] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2257.068556] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2257.068704] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2257.068912] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2257.069155] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2257.069322] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies 
[ 2257.069492] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2257.069656] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2257.070082] env[68673]: DEBUG nova.virt.hardware [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2257.070718] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2faa1b51-afd0-4d63-856c-5aa90faa1be5 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2257.078010] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abc1682-e47c-4882-b508-6a6f38f71e75 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2257.324134] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Successfully created port: eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2258.197150] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Successfully updated port: eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2258.208363] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2258.208506] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2258.208680] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2258.265598] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2258.424758] env[68673]: DEBUG nova.network.neutron [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Updating instance_info_cache with network_info: [{"id": "eb856593-79bc-4cec-8c98-79bd3f12a911", "address": "fa:16:3e:09:c0:4e", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb856593-79", "ovs_interfaceid": "eb856593-79bc-4cec-8c98-79bd3f12a911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2258.437614] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2258.437893] env[68673]: DEBUG nova.compute.manager [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Instance network_info: |[{"id": "eb856593-79bc-4cec-8c98-79bd3f12a911", "address": "fa:16:3e:09:c0:4e", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb856593-79", "ovs_interfaceid": "eb856593-79bc-4cec-8c98-79bd3f12a911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 2258.438313] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:c0:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb856593-79bc-4cec-8c98-79bd3f12a911', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2258.445710] env[68673]: DEBUG oslo.service.loopingcall [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2258.446168] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2258.446390] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e0e83bd-bf02-46f5-a2cf-280c28920182 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2258.466574] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2258.466574] env[68673]: value = "task-3433639"
[ 2258.466574] env[68673]: _type = "Task"
[ 2258.466574] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2258.474520] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433639, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2258.530801] env[68673]: DEBUG nova.compute.manager [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Received event network-vif-plugged-eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2258.531214] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Acquiring lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2258.531372] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2258.531575] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2258.531856] env[68673]: DEBUG nova.compute.manager [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] No waiting events found dispatching network-vif-plugged-eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 2258.532107] env[68673]: WARNING nova.compute.manager [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Received unexpected event network-vif-plugged-eb856593-79bc-4cec-8c98-79bd3f12a911 for instance with vm_state building and task_state spawning.
[ 2258.532316] env[68673]: DEBUG nova.compute.manager [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Received event network-changed-eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2258.532457] env[68673]: DEBUG nova.compute.manager [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Refreshing instance network info cache due to event network-changed-eb856593-79bc-4cec-8c98-79bd3f12a911. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 2258.532629] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Acquiring lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2258.532767] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Acquired lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2258.533110] env[68673]: DEBUG nova.network.neutron [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Refreshing network info cache for port eb856593-79bc-4cec-8c98-79bd3f12a911 {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2258.781300] env[68673]: DEBUG nova.network.neutron [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Updated VIF entry in instance network info cache for port eb856593-79bc-4cec-8c98-79bd3f12a911. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2258.781680] env[68673]: DEBUG nova.network.neutron [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Updating instance_info_cache with network_info: [{"id": "eb856593-79bc-4cec-8c98-79bd3f12a911", "address": "fa:16:3e:09:c0:4e", "network": {"id": "6574f0aa-28c3-4639-8f1e-07442345123f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-568511220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452e234cb78e44f5b2c1fe13e8b207b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb856593-79", "ovs_interfaceid": "eb856593-79bc-4cec-8c98-79bd3f12a911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2258.793127] env[68673]: DEBUG oslo_concurrency.lockutils [req-315d4055-3f51-4025-a128-f2aceb711b15 req-5622af04-8531-41d5-bfbe-b98f5d951c93 service nova] Releasing lock "refresh_cache-f7b5663b-1c63-47d9-85dc-59a47a82d5b9" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2258.976549] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433639, 'name': CreateVM_Task, 'duration_secs': 0.314951} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2258.976732] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2258.977365] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2258.977529] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2258.977833] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2258.978081] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72796dc-4da3-47ad-863f-2f25d10a9a8e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2258.982596] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Waiting for the task: (returnval){
[ 2258.982596] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5215519c-a960-8578-8c3f-b94c75ddc4f1"
[ 2258.982596] env[68673]: _type = "Task"
[ 2258.982596] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2258.989897] env[68673]: DEBUG oslo_vmware.api [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5215519c-a960-8578-8c3f-b94c75ddc4f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
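(Editor's note: the lock and external semaphore named after "[datastore1] devstack-image-cache_base/7da4e48b-..." plus the SearchDatastore_Task that follows form a per-image cache guard: serialize on the image id, check whether the cached VMDK already exists, and fetch it only if missing. A compact sketch of that check-then-fetch pattern on a local filesystem, with hypothetical names (ensure_cached, fetch), not the driver's code:

    import threading
    from pathlib import Path

    _locks = {}
    _locks_guard = threading.Lock()

    def _lock_for(key):
        # One lock per image id, created on first use.
        with _locks_guard:
            return _locks.setdefault(key, threading.Lock())

    def ensure_cached(cache_dir, image_id, fetch):
        # Serialize per image so concurrent builds download it only once.
        target = Path(cache_dir) / image_id / f'{image_id}.vmdk'
        with _lock_for(image_id):
            if not target.exists():          # the SearchDatastore step, simplified
                target.parent.mkdir(parents=True, exist_ok=True)
                fetch(target)                # the download step
        return target

Consistent with this, f7b5663b below goes straight from "Processing image" to acquiring the .vmdk lock with no new download visible in this section.)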
[ 2259.493679] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2259.493679] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2259.493679] env[68673]: DEBUG oslo_concurrency.lockutils [None req-69451c13-7d68-4b84-8cb3-89e7dc44ffeb tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2269.263172] env[68673]: DEBUG oslo_concurrency.lockutils [None req-8455c1d9-4d72-4ef0-aade-5df13198bb54 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2277.784550] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2278.783142] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2278.783418] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2278.783625] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 2279.784623] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2279.784969] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 2279.784969] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 2279.807264] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.807425] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.807559] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.807685] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.807805] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.807925] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.808063] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.808185] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.808304] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.808419] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2279.808563] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 2281.802757] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2282.783280] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2282.783520] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2283.780234] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2283.800913] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2283.812278] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2283.812473] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2283.812638] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2283.812789] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
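(Editor's note: the oslo_service.periodic_task records show the compute manager's housekeeping loop: each named task (_instance_usage_audit, _poll_unconfirmed_resizes, _heal_instance_info_cache, update_available_resource, and so on) fires when its interval elapses. A toy interval scheduler in the same spirit, illustrative only and much simpler than the real runner, which adds per-task spacing and error handling:

    import time

    def run_periodic(tasks, tick=1.0, max_ticks=None):
        # tasks: list of (name, interval_seconds, callable).
        last = {name: float('-inf') for name, _, _ in tasks}
        ticks = 0
        while max_ticks is None or ticks < max_ticks:
            now = time.monotonic()
            for name, interval, fn in tasks:
                if now - last[name] >= interval:
                    print(f'Running periodic task {name}')  # cf. run_periodic_tasks above
                    fn()
                    last[name] = now
            time.sleep(tick)
            ticks += 1

This sketch only reproduces the fire-when-due behavior visible in the timestamps above.)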
[ 2283.813876] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d055279-24d2-46ff-aca9-874ce26a4ff6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2283.822640] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d99a6bf-7594-4e53-bf9e-3273e7a043da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2283.836419] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5566b46a-baab-4cad-af45-935c97843484 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2283.842339] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779b0910-4ef6-4a39-88dc-a0caf5242ba8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2283.870982] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180875MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2283.871132] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2283.871326] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2283.942388] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.942621] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.942708] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.942827] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.942944] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943071] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943188] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943303] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3277e704-c970-4482-a812-f02e297f99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943417] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 643c6d20-b3b9-440d-82f2-7c09a609717d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943530] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f7b5663b-1c63-47d9-85dc-59a47a82d5b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2283.943715] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2283.943849] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2284.052070] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c6065f-d3c4-4d0e-be12-faf68402ce61 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2284.060817] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eac3044-175d-49cd-815d-5de50bcf7d8d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2284.089871] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec7550f-17be-4fb8-be9d-7e5fcf0aad35 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2284.096383] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f07a0d1-3035-43f6-b1f4-f5d6c6cd6c6e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2284.108750] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2284.116815] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2284.130973] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2284.131178] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.260s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2287.114052] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
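(Editor's note: the resource audit above is plain arithmetic over the ten m1.nano instances listed: used_ram = 512 MB reserved + 10 * 128 MB = 1792 MB, used_disk = 10 * 1 GB = 10 GB, used_vcpus = 10 * 1 = 10, exactly the "Final resource view" reported. As a worked check using only numbers from the records:

    instances = 10          # the ten instances enumerated above
    flavor_ram_mb = 128     # m1.nano memory_mb
    flavor_vcpus = 1
    flavor_root_gb = 1
    reserved_ram_mb = 512   # 'reserved' in the MEMORY_MB inventory

    used_ram = reserved_ram_mb + instances * flavor_ram_mb
    used_disk = instances * flavor_root_gb
    used_vcpus = instances * flavor_vcpus
    print(used_ram, used_disk, used_vcpus)   # 1792 10 10

The VCPU inventory's allocation_ratio of 4.0 means placement treats the 48 physical vCPUs as 192 allocatable, so 10 allocated leaves ample headroom.)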
[ 2300.837067] env[68673]: WARNING oslo_vmware.rw_handles [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles response.begin()
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2300.837067] env[68673]: ERROR oslo_vmware.rw_handles
[ 2300.837067] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2300.839357] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2300.839616] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Copying Virtual Disk [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/909ac75c-4536-4068-aa11-b08f9047d8c5/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2300.839910] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6af3a9e-b90c-433a-963f-3b3d9f0af738 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2300.847630] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for the task: (returnval){
[ 2300.847630] env[68673]: value = "task-3433640"
[ 2300.847630] env[68673]: _type = "Task"
[ 2300.847630] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2300.855502] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': task-3433640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2301.359473] env[68673]: DEBUG oslo_vmware.exceptions [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2301.359716] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2301.360284] env[68673]: ERROR nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2301.360284] env[68673]: Faults: ['InvalidArgument']
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Traceback (most recent call last):
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] yield resources
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self.driver.spawn(context, instance, image_meta,
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self._fetch_image_if_missing(context, vi)
[ 2301.360284] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] image_cache(vi, tmp_image_ds_loc)
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] vm_util.copy_virtual_disk(
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] session._wait_for_task(vmdk_copy_task)
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return self.wait_for_task(task_ref)
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return evt.wait()
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] result = hub.switch()
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2301.360915] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return self.greenlet.switch()
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self.f(*self.args, **self.kw)
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] raise exceptions.translate_fault(task_info.error)
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Faults: ['InvalidArgument']
[ 2301.361564] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75]
[ 2301.361564] env[68673]: INFO nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Terminating instance
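(Editor's note: this traceback is the error path of the task-polling machinery sketched earlier: _poll_task sees CopyVirtualDisk_Task fail and raises exceptions.translate_fault(task_info.error), which surfaces in the compute manager as a VimFaultException carrying the fault list ['InvalidArgument']. A minimal model of the translation step, with illustrative types, not oslo.vmware's real classes:

    class VimFault(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, message, faults):
            super().__init__(message)
            self.faults = faults

    def translate_fault(task_error):
        # task_error modeled here as {'msg': ..., 'faults': [...]}.
        return VimFault(task_error['msg'], task_error.get('faults', []))

    try:
        raise translate_fault({'msg': 'A specified parameter was not correct: fileType',
                               'faults': ['InvalidArgument']})
    except VimFault as exc:
        print(exc, exc.faults)   # the message and fault list seen in the log

The earlier "Fault InvalidArgument not matched." record is the first half of this step: the library tries to map the fault name to a specific exception class and falls back to the generic VimFaultException when no match exists.)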
[ 2301.363039] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2301.363039] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2301.363039] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4675cfc4-4e48-4c6a-a642-e774853070fc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.364916] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2301.365120] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2301.365818] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9f1393-cf7c-40c9-b645-233f595ee104 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.373584] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2301.373782] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-500cffc3-4c56-45c1-b179-16ca1ac16828 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.375772] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2301.375939] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2301.376876] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-885ca284-5b03-414f-927c-bac158bd1c83 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.381245] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){
[ 2301.381245] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e12165-bb9c-ca83-7513-7797361acf7d"
[ 2301.381245] env[68673]: _type = "Task"
[ 2301.381245] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2301.389265] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52e12165-bb9c-ca83-7513-7797361acf7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2301.446540] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2301.446743] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2301.446913] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Deleting the datastore file [datastore1] db978196-4b5b-4d15-84c6-5e1f355d0e75 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2301.447200] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-394557ce-5bed-4fd9-8b0b-3b2e043e5039 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.456285] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for the task: (returnval){
[ 2301.456285] env[68673]: value = "task-3433642"
[ 2301.456285] env[68673]: _type = "Task"
[ 2301.456285] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2301.463670] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': task-3433642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2301.891727] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2301.892129] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating directory with path [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2301.892229] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c3ca37c-0a46-4cf1-b0f6-f2680d024f5a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.903210] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Created directory with path [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2301.903401] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Fetch image to [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2301.903573] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2301.904284] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9b56c2-3c8d-489f-b076-808ecbc3968c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.910625] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bdc18b-a980-4b36-8cdd-cb07297285da {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.919502] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f85b73a-de3d-4bfb-bba6-854776d203d6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2301.950411] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b23ed1-657b-4d41-944f-bc9c83f3b273 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
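(Editor's note: the fetch sequence above, MakeDirectory into a unique vmware_temp/<uuid>/<image-id> path, a download to tmp-sparse.vmdk, and then, as in the earlier CopyVirtualDisk records, a copy to the final cache name, is the download-to-temp-then-publish pattern: a half-written image is never visible under the cache path. A local-filesystem sketch of the same idea, with a hypothetical download callable, not the driver's datastore operations:

    import shutil
    import uuid
    from pathlib import Path

    def fetch_to_cache(cache_root, image_id, download):
        # Stage the download in a unique temp directory first.
        tmp_dir = Path(cache_root) / 'vmware_temp' / str(uuid.uuid4()) / image_id
        tmp_dir.mkdir(parents=True, exist_ok=True)      # "Creating directory with path ..."
        tmp_file = tmp_dir / 'tmp-sparse.vmdk'
        download(tmp_file)                              # "Fetch image to ... tmp-sparse.vmdk"
        final = Path(cache_root) / image_id / f'{image_id}.vmdk'
        final.parent.mkdir(parents=True, exist_ok=True)
        shutil.move(str(tmp_file), str(final))          # stands in for the CopyVirtualDisk step
        return final

The InvalidArgument failure logged earlier for db978196 struck exactly this publish step, the copy from the temp file to the cached name.)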
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.955791] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e66fd529-284f-43fd-b568-ef5aaef15d75 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.964671] env[68673]: DEBUG oslo_vmware.api [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Task: {'id': task-3433642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061411} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2301.964903] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2301.965100] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2301.965266] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2301.965451] env[68673]: INFO nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2301.967781] env[68673]: DEBUG nova.compute.claims [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2301.967870] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2301.968065] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.981031] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2302.029725] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2302.090724] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2302.090923] env[68673]: DEBUG oslo_vmware.rw_handles [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2302.186651] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbd2ee6-c79d-4a69-b99c-635ed82812eb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.194746] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6281fee-7334-4ab9-9324-446ef3b14285 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.226036] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63106718-6762-4f3b-a287-534cc7733631 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.232786] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c92af0b-f192-4eca-a600-9180761f5fef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.245529] env[68673]: DEBUG nova.compute.provider_tree [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2302.255049] env[68673]: DEBUG nova.scheduler.client.report [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2302.271297] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.272055] env[68673]: ERROR nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2302.272055] env[68673]: Faults: ['InvalidArgument'] [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Traceback (most recent call last): [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2302.272055] 
env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self.driver.spawn(context, instance, image_meta, [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self._fetch_image_if_missing(context, vi) [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] image_cache(vi, tmp_image_ds_loc) [ 2302.272055] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] vm_util.copy_virtual_disk( [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] session._wait_for_task(vmdk_copy_task) [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return self.wait_for_task(task_ref) [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return evt.wait() [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] result = hub.switch() [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] return self.greenlet.switch() [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2302.272329] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] self.f(*self.args, **self.kw) [ 2302.272612] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2302.272612] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] raise exceptions.translate_fault(task_info.error) [ 2302.272612] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2302.272612] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Faults: ['InvalidArgument'] [ 2302.272612] env[68673]: ERROR nova.compute.manager [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] [ 2302.273037] env[68673]: DEBUG nova.compute.utils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2302.274713] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Build of instance db978196-4b5b-4d15-84c6-5e1f355d0e75 was re-scheduled: A specified parameter was not correct: fileType [ 2302.274713] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2302.275015] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2302.275202] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2302.275374] env[68673]: DEBUG nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2302.275537] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2302.566664] env[68673]: DEBUG nova.network.neutron [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2302.577299] env[68673]: INFO nova.compute.manager [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Took 0.30 seconds to deallocate network for instance. [ 2302.670276] env[68673]: INFO nova.scheduler.client.report [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Deleted allocations for instance db978196-4b5b-4d15-84c6-5e1f355d0e75 [ 2302.691934] env[68673]: DEBUG oslo_concurrency.lockutils [None req-74887eb5-14b1-463e-90d8-e800ca239aad tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.966s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.692097] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 473.569s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2302.692238] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2302.692445] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.693038] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.114s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2302.693234] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Acquiring lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2302.693547] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2302.693596] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.696189] env[68673]: INFO nova.compute.manager [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Terminating instance [ 2302.698307] env[68673]: DEBUG nova.compute.manager [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2302.698435] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2302.698766] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a5e1db9-472f-4755-b641-dbf6806942a9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.708314] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f92c6e-b028-4637-88fc-6648f4e339e1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.740194] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db978196-4b5b-4d15-84c6-5e1f355d0e75 could not be found. [ 2302.740410] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2302.740717] env[68673]: INFO nova.compute.manager [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2302.740846] env[68673]: DEBUG oslo.service.loopingcall [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2302.741082] env[68673]: DEBUG nova.compute.manager [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2302.741179] env[68673]: DEBUG nova.network.neutron [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2302.780879] env[68673]: DEBUG nova.network.neutron [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2302.789177] env[68673]: INFO nova.compute.manager [-] [instance: db978196-4b5b-4d15-84c6-5e1f355d0e75] Took 0.05 seconds to deallocate network for instance. 
[ 2302.877691] env[68673]: DEBUG oslo_concurrency.lockutils [None req-851d4e8c-7443-44aa-a250-e2015f39f069 tempest-AttachVolumeNegativeTest-1329212654 tempest-AttachVolumeNegativeTest-1329212654-project-member] Lock "db978196-4b5b-4d15-84c6-5e1f355d0e75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.193921] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.194196] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.204544] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Starting instance... {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2330.249301] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.249539] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.250941] env[68673]: INFO nova.compute.claims [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2330.403862] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3f0ab5-c5f0-4de7-a41c-90f7ec859d09 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.411663] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eb4d15-194c-4dde-bc30-25bde870f01d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.442620] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bb0e900e-9430-42ae-a8bd-f0af27c6d404 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.449202] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10fc4a0-0933-4f83-bcfc-c35347e9ac0e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.462025] env[68673]: DEBUG nova.compute.provider_tree [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2330.470688] env[68673]: DEBUG nova.scheduler.client.report [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2330.483924] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.234s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.484413] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2330.516502] env[68673]: DEBUG nova.compute.utils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2330.518069] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Allocating IP information in the background. 
{{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2330.518247] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2330.528494] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2330.597597] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Start spawning the instance on the hypervisor. {{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2330.621772] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2330.622048] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2330.622232] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2330.622425] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2330.622576] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2330.622723] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 
tempest-ServersTestJSON-1463022821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2330.622953] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2330.623162] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2330.623340] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2330.623505] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2330.623681] env[68673]: DEBUG nova.virt.hardware [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2330.624552] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8102b4-38fa-414e-8922-375ba197f738 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.632493] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2e7b90-c142-4471-8013-9d5be3e13b17 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.715213] env[68673]: DEBUG nova.policy [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a135381c6987442cad21b1f74d5a9e34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8ecabf65f524fb5bfcb60401c45db96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2331.038708] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Successfully created port: f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 2331.579362] env[68673]: DEBUG nova.compute.manager [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Received event network-vif-plugged-f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2331.579579] env[68673]: DEBUG oslo_concurrency.lockutils [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] Acquiring lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.579786] env[68673]: DEBUG oslo_concurrency.lockutils [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] Lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.579953] env[68673]: DEBUG oslo_concurrency.lockutils [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] Lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.580131] env[68673]: DEBUG nova.compute.manager [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] No waiting events found dispatching network-vif-plugged-f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2331.580297] env[68673]: WARNING nova.compute.manager [req-b660625f-f1a9-4c2b-81df-1b341ad3e4e0 req-a740117d-c909-4af4-8471-943cbff74888 service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Received unexpected event network-vif-plugged-f5b6fbfe-f903-48b0-a61e-202ab345a6ca for instance with vm_state building and task_state spawning. 
[ 2331.709161] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Successfully updated port: f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2331.719293] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2331.719439] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2331.719589] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2331.780731] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Instance cache missing network info. {{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2331.958376] env[68673]: DEBUG nova.network.neutron [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Updating instance_info_cache with network_info: [{"id": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "address": "fa:16:3e:76:01:c6", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b6fbfe-f9", "ovs_interfaceid": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.971902] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 
tempest-ServersTestJSON-1463022821-project-member] Releasing lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.972245] env[68673]: DEBUG nova.compute.manager [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Instance network_info: |[{"id": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "address": "fa:16:3e:76:01:c6", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b6fbfe-f9", "ovs_interfaceid": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2331.972668] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:01:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5b6fbfe-f903-48b0-a61e-202ab345a6ca', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2331.980532] env[68673]: DEBUG oslo.service.loopingcall [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2331.981101] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2331.981342] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f32f5c20-843d-48b1-9db0-7c3bf699419b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.004591] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2332.004591] env[68673]: value = "task-3433643" [ 2332.004591] env[68673]: _type = "Task" [ 2332.004591] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.013508] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433643, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.515588] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433643, 'name': CreateVM_Task, 'duration_secs': 0.298657} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.515754] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2332.516459] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.516625] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.516944] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2332.517213] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da6421de-47a3-440e-94a1-e9cff75df48b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.521582] env[68673]: DEBUG oslo_vmware.api [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2332.521582] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52c26f30-ade6-051a-2e14-5f37f2825c1e" [ 2332.521582] env[68673]: _type = "Task" [ 2332.521582] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.529335] env[68673]: DEBUG oslo_vmware.api [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52c26f30-ade6-051a-2e14-5f37f2825c1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.032319] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2333.032656] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2333.032726] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b6e752ee-d740-4f90-bf56-1a15c4eb2661 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2333.609164] env[68673]: DEBUG nova.compute.manager [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Received event network-changed-f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2333.609453] env[68673]: DEBUG nova.compute.manager [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Refreshing instance network info cache due to event network-changed-f5b6fbfe-f903-48b0-a61e-202ab345a6ca. {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2333.609687] env[68673]: DEBUG oslo_concurrency.lockutils [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] Acquiring lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2333.609846] env[68673]: DEBUG oslo_concurrency.lockutils [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] Acquired lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2333.610014] env[68673]: DEBUG nova.network.neutron [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Refreshing network info cache for port f5b6fbfe-f903-48b0-a61e-202ab345a6ca {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2333.847586] env[68673]: DEBUG nova.network.neutron [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Updated VIF entry in instance network info cache for port f5b6fbfe-f903-48b0-a61e-202ab345a6ca. 
{{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2333.847961] env[68673]: DEBUG nova.network.neutron [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Updating instance_info_cache with network_info: [{"id": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "address": "fa:16:3e:76:01:c6", "network": {"id": "9ac4985a-ad88-4951-8301-df7edc487470", "bridge": "br-int", "label": "tempest-ServersTestJSON-1266937800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8ecabf65f524fb5bfcb60401c45db96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b6fbfe-f9", "ovs_interfaceid": "f5b6fbfe-f903-48b0-a61e-202ab345a6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2333.856776] env[68673]: DEBUG oslo_concurrency.lockutils [req-2b5515a6-85e2-4bfc-ab22-b09edb20dc53 req-62d55a7b-3381-4d00-905c-dc4a86361c9c service nova] Releasing lock "refresh_cache-8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2338.784534] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2338.784951] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.784800] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.785084] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2340.785145] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2340.806990] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807171] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807305] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807431] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807554] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807675] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807811] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.807950] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.808092] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.808218] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2340.808339] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2340.808816] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.808993] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2342.803822] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2343.783860] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.784704] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.783917] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.796693] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.797053] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.797115] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2345.797230] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2345.798395] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00fcbfd-dc02-43b1-9092-d75fc5706c3f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.806732] env[68673]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9794bee-a109-4850-b674-dfb07e81cfa1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.821295] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f92c08-493a-43e2-8a17-a17dc989444b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.827218] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11781c3-4cc6-4214-b561-a628b9984de6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.855300] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180883MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2345.855432] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.855616] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.925784] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 16320084-1a09-493a-8ff2-389da64b92a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.925992] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926144] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926269] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926389] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926506] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926621] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3277e704-c970-4482-a812-f02e297f99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926769] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 643c6d20-b3b9-440d-82f2-7c09a609717d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926844] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f7b5663b-1c63-47d9-85dc-59a47a82d5b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.926955] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.927163] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2345.927300] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2346.040622] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e40a075-1d71-4ab7-999e-1227740d1ba2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.047952] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0394b86-183d-43ed-8cdc-b023764e891a {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.078542] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98fe6ad-5983-4924-ae5d-b39d1dfe0ec1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.085136] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273e0390-6145-4bb7-bee7-11bcacc70698 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.097598] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2346.105684] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2346.118985] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2346.119177] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2349.119061] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.850178] env[68673]: WARNING oslo_vmware.rw_handles [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2350.850178] env[68673]: ERROR oslo_vmware.rw_handles [ 2350.850783] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2350.853142] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2350.853582] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Copying Virtual Disk [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/e53c3d57-1150-48fa-a543-366bea598e3a/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2350.853723] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d16c54d-b62a-40f8-951e-777d23bdac41 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.862105] env[68673]: DEBUG oslo_vmware.api [None 
req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 2350.862105] env[68673]: value = "task-3433644" [ 2350.862105] env[68673]: _type = "Task" [ 2350.862105] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2350.869731] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.372314] env[68673]: DEBUG oslo_vmware.exceptions [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2351.372540] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2351.373111] env[68673]: ERROR nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2351.373111] env[68673]: Faults: ['InvalidArgument'] [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Traceback (most recent call last): [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] yield resources [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self.driver.spawn(context, instance, image_meta, [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self._fetch_image_if_missing(context, vi) [ 2351.373111] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] image_cache(vi, tmp_image_ds_loc) [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] vm_util.copy_virtual_disk( [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] session._wait_for_task(vmdk_copy_task) [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return self.wait_for_task(task_ref) [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return evt.wait() [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] result = hub.switch() [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2351.373446] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return self.greenlet.switch() [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self.f(*self.args, **self.kw) [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] raise exceptions.translate_fault(task_info.error) [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Faults: ['InvalidArgument'] [ 2351.373757] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] [ 2351.373757] env[68673]: INFO nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 
16320084-1a09-493a-8ff2-389da64b92a2] Terminating instance [ 2351.375031] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2351.375261] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2351.375499] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6947508-1ae3-44c4-938b-22562394c8fd {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.377654] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2351.377882] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2351.378618] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62816a4d-c933-4d73-8e1b-661a8efb265d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.385426] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2351.385673] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3476030b-e203-40a5-b930-b68bb00d6ccc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.387815] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2351.387998] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2351.388958] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ea122aa-0918-47a2-b0c3-e4f1e262f516 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.393402] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2351.393402] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524dacbe-36ea-af6d-9e60-1cf221d405cf" [ 2351.393402] env[68673]: _type = "Task" [ 2351.393402] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.400619] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]524dacbe-36ea-af6d-9e60-1cf221d405cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.446373] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2351.446590] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2351.446773] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleting the datastore file [datastore1] 16320084-1a09-493a-8ff2-389da64b92a2 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2351.447055] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b168838-2396-4312-ad1b-bbd49dc1550c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.454584] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for the task: (returnval){ [ 2351.454584] env[68673]: value = "task-3433646" [ 2351.454584] env[68673]: _type = "Task" [ 2351.454584] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.462266] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.904319] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2351.904593] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating directory with path [datastore1] vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2351.904824] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-645e069c-0c7e-43cf-adec-05f786d34368 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.916075] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Created directory with path [datastore1] vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2351.916274] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Fetch image to [datastore1] vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2351.916458] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2351.917186] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd587fa-6214-449e-aff2-aaea6ed2643d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.923836] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07907b3d-4cf1-4103-b166-6c3260a33a3e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.932780] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce0e961-1481-4823-9084-8fd1af3e667d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.967464] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6612fe-e09b-4fb2-aeae-874cb3fccf23 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.974350] env[68673]: DEBUG oslo_vmware.api [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Task: {'id': task-3433646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066406} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2351.975756] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2351.975951] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2351.976139] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2351.976317] env[68673]: INFO nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2351.978104] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e4879fd9-9fc1-4a77-ad3f-a6b636f0ecd6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.979955] env[68673]: DEBUG nova.compute.claims [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2351.980415] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.980415] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.003265] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2352.054893] env[68673]: DEBUG oslo_vmware.rw_handles [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2352.117229] env[68673]: DEBUG oslo_vmware.rw_handles [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2352.117475] env[68673]: DEBUG oslo_vmware.rw_handles [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2352.206991] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1834b18-9701-49f5-9f2a-cc454c401e61 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.215466] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9044482-16f3-443f-9a45-20c47f281402 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.247579] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff597b60-8541-4638-835e-a39dc136fd92 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.254735] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db5b4d5-e50d-4d2b-84ff-7f6582b23000 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.267801] env[68673]: DEBUG nova.compute.provider_tree [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2352.276587] env[68673]: DEBUG nova.scheduler.client.report [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2352.290904] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.310s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.291177] env[68673]: ERROR nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2352.291177] env[68673]: Faults: ['InvalidArgument'] [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Traceback (most recent call last): [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2352.291177] env[68673]: ERROR 
nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self.driver.spawn(context, instance, image_meta, [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self._fetch_image_if_missing(context, vi) [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] image_cache(vi, tmp_image_ds_loc) [ 2352.291177] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] vm_util.copy_virtual_disk( [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] session._wait_for_task(vmdk_copy_task) [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return self.wait_for_task(task_ref) [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return evt.wait() [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] result = hub.switch() [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] return self.greenlet.switch() [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2352.291533] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] self.f(*self.args, **self.kw) [ 2352.291846] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2352.291846] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] raise exceptions.translate_fault(task_info.error) [ 2352.291846] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2352.291846] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Faults: ['InvalidArgument'] [ 2352.291846] env[68673]: ERROR nova.compute.manager [instance: 16320084-1a09-493a-8ff2-389da64b92a2] [ 2352.291846] env[68673]: DEBUG nova.compute.utils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2352.293323] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Build of instance 16320084-1a09-493a-8ff2-389da64b92a2 was re-scheduled: A specified parameter was not correct: fileType [ 2352.293323] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2352.293679] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2352.293851] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2352.294044] env[68673]: DEBUG nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2352.294197] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2352.640234] env[68673]: DEBUG nova.network.neutron [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2352.653248] env[68673]: INFO nova.compute.manager [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Took 0.36 seconds to deallocate network for instance. [ 2352.740018] env[68673]: INFO nova.scheduler.client.report [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Deleted allocations for instance 16320084-1a09-493a-8ff2-389da64b92a2 [ 2352.762479] env[68673]: DEBUG oslo_concurrency.lockutils [None req-7375c653-ad97-4fe5-9d1b-d32da6a7b737 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 588.097s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.762742] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.919s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.763020] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Acquiring lock "16320084-1a09-493a-8ff2-389da64b92a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2352.763242] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.763410] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.765593] env[68673]: INFO nova.compute.manager [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Terminating instance [ 2352.767413] env[68673]: DEBUG nova.compute.manager [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2352.767661] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2352.768131] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-680f9567-68a0-4d1f-94c9-140302a31c2c {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.777368] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a705bf16-e952-413e-847b-27d803d10cef {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.808639] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16320084-1a09-493a-8ff2-389da64b92a2 could not be found. [ 2352.808872] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2352.809078] env[68673]: INFO nova.compute.manager [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2352.809397] env[68673]: DEBUG oslo.service.loopingcall [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2352.809549] env[68673]: DEBUG nova.compute.manager [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2352.809643] env[68673]: DEBUG nova.network.neutron [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2352.834026] env[68673]: DEBUG nova.network.neutron [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2352.841948] env[68673]: INFO nova.compute.manager [-] [instance: 16320084-1a09-493a-8ff2-389da64b92a2] Took 0.03 seconds to deallocate network for instance. [ 2352.937870] env[68673]: DEBUG oslo_concurrency.lockutils [None req-b51d04cf-3350-4c1e-99d8-83b17fca2063 tempest-DeleteServersTestJSON-1393776232 tempest-DeleteServersTestJSON-1393776232-project-member] Lock "16320084-1a09-493a-8ff2-389da64b92a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2356.518910] env[68673]: DEBUG oslo_concurrency.lockutils [None req-a3377690-f163-41b7-940e-1078fdba3071 tempest-InstanceActionsNegativeTestJSON-354416706 tempest-InstanceActionsNegativeTestJSON-354416706-project-member] Acquiring lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.682026] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquiring lock "2ee49144-b168-42bf-8807-797bbe12839e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.682026] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Lock "2ee49144-b168-42bf-8807-797bbe12839e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2366.692766] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Starting instance... 
{{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2366.742813] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.743072] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2366.744494] env[68673]: INFO nova.compute.claims [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2366.940680] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02c6cd2-5d7c-47ee-8b48-cccc42949e05 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.951460] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc480d6-1ba0-4302-9723-08ca1827f0f0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.984247] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8eb0d73-d619-4515-a43c-390389a7a063 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.991583] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36951319-0ca8-4535-b5a7-c46a6a8ce6f7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.004983] env[68673]: DEBUG nova.compute.provider_tree [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2367.013416] env[68673]: DEBUG nova.scheduler.client.report [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2367.029445] env[68673]: DEBUG 
oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.286s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2367.029980] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Start building networks asynchronously for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2367.064221] env[68673]: DEBUG nova.compute.utils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Using /dev/sd instead of None {{(pid=68673) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2367.065859] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Allocating IP information in the background. {{(pid=68673) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2367.066120] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] allocate_for_instance() {{(pid=68673) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2367.075768] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Start building block device mappings for instance. {{(pid=68673) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2367.120715] env[68673]: DEBUG nova.policy [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0423a212bf34027acb6333ffa8f5748', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c53d529361aa4c6ebab0210ab9be6569', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68673) authorize /opt/stack/nova/nova/policy.py:203}} [ 2367.138857] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Start spawning the instance on the hypervisor. 
{{(pid=68673) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2367.167509] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-15T11:24:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-15T11:24:23Z,direct_url=,disk_format='vmdk',id=7da4e48b-416f-425b-b73b-3305c69c87ef,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='706e55e049ed41a4bb47b7f5d092a466',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-15T11:24:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2367.167764] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Flavor limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2367.167934] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Image limits 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2367.168140] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Flavor pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2367.168289] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Image pref 0:0:0 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2367.168437] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68673) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2367.168644] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2367.168803] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2367.168970] 
env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Got 1 possible topologies {{(pid=68673) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2367.169370] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2367.169561] env[68673]: DEBUG nova.virt.hardware [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68673) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2367.170452] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5501e5ca-86bc-4c45-9e29-0e39e8fd44dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.178841] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ef1780-d5b3-4556-a0a7-c40236bb5630 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.418225] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Successfully created port: 3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2368.147121] env[68673]: DEBUG nova.compute.manager [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Received event network-vif-plugged-3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2368.147377] env[68673]: DEBUG oslo_concurrency.lockutils [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] Acquiring lock "2ee49144-b168-42bf-8807-797bbe12839e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.147555] env[68673]: DEBUG oslo_concurrency.lockutils [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] Lock "2ee49144-b168-42bf-8807-797bbe12839e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.147806] env[68673]: DEBUG oslo_concurrency.lockutils [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] Lock "2ee49144-b168-42bf-8807-797bbe12839e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2368.147934] env[68673]: DEBUG nova.compute.manager [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] No waiting events found dispatching network-vif-plugged-3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2368.148204] env[68673]: WARNING nova.compute.manager [req-33756a93-1687-4561-9a78-d6cebf60010f req-60a8d6b1-77e1-46bc-8c17-3c011fb29d2b service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Received unexpected event network-vif-plugged-3e54861d-2b33-4ac8-89e8-d8830a8b2a8f for instance with vm_state building and task_state spawning. [ 2368.370106] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Successfully updated port: 3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2368.382037] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquiring lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2368.382195] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquired lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2368.382428] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Building network info cache for instance {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2368.427098] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Instance cache missing network info. 
{{(pid=68673) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2368.587346] env[68673]: DEBUG nova.network.neutron [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Updating instance_info_cache with network_info: [{"id": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "address": "fa:16:3e:8c:4d:76", "network": {"id": "78d185d8-5ca0-4abb-b4d4-f7d3ed29ebfa", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-985113484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c53d529361aa4c6ebab0210ab9be6569", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e54861d-2b", "ovs_interfaceid": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2368.600716] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Releasing lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2368.601036] env[68673]: DEBUG nova.compute.manager [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Instance network_info: |[{"id": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "address": "fa:16:3e:8c:4d:76", "network": {"id": "78d185d8-5ca0-4abb-b4d4-f7d3ed29ebfa", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-985113484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c53d529361aa4c6ebab0210ab9be6569", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e54861d-2b", "ovs_interfaceid": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68673) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2368.601456] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:4d:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e350f83a-f581-4e10-ac16-0b0f7bfd3d38', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e54861d-2b33-4ac8-89e8-d8830a8b2a8f', 'vif_model': 'vmxnet3'}] {{(pid=68673) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2368.609292] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Creating folder: Project (c53d529361aa4c6ebab0210ab9be6569). Parent ref: group-v685311. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2368.609820] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf8c2dd5-0607-467e-b628-06730c7569fb {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.620929] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Created folder: Project (c53d529361aa4c6ebab0210ab9be6569) in parent group-v685311. [ 2368.621126] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Creating folder: Instances. Parent ref: group-v685427. {{(pid=68673) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2368.621345] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75fb7b22-3d84-452e-ac7b-76776aa52423 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.629757] env[68673]: INFO nova.virt.vmwareapi.vm_util [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Created folder: Instances in parent group-v685427. [ 2368.629985] env[68673]: DEBUG oslo.service.loopingcall [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2368.630175] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Creating VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2368.630359] env[68673]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4320208-1158-4d9c-9ccf-e400117111b9 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.649162] env[68673]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2368.649162] env[68673]: value = "task-3433649" [ 2368.649162] env[68673]: _type = "Task" [ 2368.649162] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2368.657825] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433649, 'name': CreateVM_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2369.161034] env[68673]: DEBUG oslo_vmware.api [-] Task: {'id': task-3433649, 'name': CreateVM_Task, 'duration_secs': 0.29109} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2369.161034] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Created VM on the ESX host {{(pid=68673) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2369.161034] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2369.161034] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2369.161034] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2369.161585] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56f36e2-b439-4d8f-84f7-674238f93c60 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.164752] env[68673]: DEBUG oslo_vmware.api [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Waiting for the task: (returnval){ [ 2369.164752] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521706d5-1c89-52dd-2d56-d86323637169" [ 2369.164752] env[68673]: _type = "Task" [ 2369.164752] env[68673]: } to complete. 
{{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2369.175347] env[68673]: DEBUG oslo_vmware.api [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]521706d5-1c89-52dd-2d56-d86323637169, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2369.675685] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2369.675894] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Processing image 7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2369.676118] env[68673]: DEBUG oslo_concurrency.lockutils [None req-96713edf-795b-40fd-a841-85741f832ea4 tempest-ServerDiskConfigTestJSON-1416219078 tempest-ServerDiskConfigTestJSON-1416219078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2370.299359] env[68673]: DEBUG nova.compute.manager [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Received event network-changed-3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2370.299752] env[68673]: DEBUG nova.compute.manager [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Refreshing instance network info cache due to event network-changed-3e54861d-2b33-4ac8-89e8-d8830a8b2a8f. 
{{(pid=68673) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2370.299752] env[68673]: DEBUG oslo_concurrency.lockutils [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] Acquiring lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2370.299898] env[68673]: DEBUG oslo_concurrency.lockutils [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] Acquired lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2370.300146] env[68673]: DEBUG nova.network.neutron [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Refreshing network info cache for port 3e54861d-2b33-4ac8-89e8-d8830a8b2a8f {{(pid=68673) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2370.609147] env[68673]: DEBUG nova.network.neutron [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Updated VIF entry in instance network info cache for port 3e54861d-2b33-4ac8-89e8-d8830a8b2a8f. {{(pid=68673) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2370.609486] env[68673]: DEBUG nova.network.neutron [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Updating instance_info_cache with network_info: [{"id": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "address": "fa:16:3e:8c:4d:76", "network": {"id": "78d185d8-5ca0-4abb-b4d4-f7d3ed29ebfa", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-985113484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c53d529361aa4c6ebab0210ab9be6569", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e54861d-2b", "ovs_interfaceid": "3e54861d-2b33-4ac8-89e8-d8830a8b2a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2370.618737] env[68673]: DEBUG oslo_concurrency.lockutils [req-6a0da916-6fae-497c-ad76-bd93fef90056 req-6e8adc9b-b343-4f45-858c-303552945460 service nova] Releasing lock "refresh_cache-2ee49144-b168-42bf-8807-797bbe12839e" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2400.783548] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2400.783848] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2400.783971] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2400.784106] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2401.790252] env[68673]: WARNING oslo_vmware.rw_handles [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2401.790252] env[68673]: ERROR oslo_vmware.rw_handles [ 2401.790836] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2401.792667] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2401.792939] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/7c051a39-3f6a-454d-80ba-e1fd5a7afe88/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2401.793253] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-559a5a18-af2d-4cac-b5da-b803134de4c0 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.805507] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2401.805507] env[68673]: value = "task-3433650" [ 2401.805507] env[68673]: _type = "Task" [ 2401.805507] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.813382] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.316567] env[68673]: DEBUG oslo_vmware.exceptions [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2402.316813] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2402.317373] env[68673]: ERROR nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2402.317373] env[68673]: Faults: ['InvalidArgument'] [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Traceback (most recent call last): [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] yield resources [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self.driver.spawn(context, instance, image_meta, [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2402.317373] env[68673]: 
ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self._fetch_image_if_missing(context, vi) [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2402.317373] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] image_cache(vi, tmp_image_ds_loc) [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] vm_util.copy_virtual_disk( [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] session._wait_for_task(vmdk_copy_task) [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return self.wait_for_task(task_ref) [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return evt.wait() [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] result = hub.switch() [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return self.greenlet.switch() [ 2402.317821] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self.f(*self.args, **self.kw) [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] raise exceptions.translate_fault(task_info.error) [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 
0dd8e45a-d77a-4c9b-a733-353fce754549] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Faults: ['InvalidArgument'] [ 2402.318194] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] [ 2402.318194] env[68673]: INFO nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Terminating instance [ 2402.319264] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2402.319470] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2402.319704] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94ec715e-262f-415f-b12a-2b525fc2d099 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.321827] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2402.322040] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2402.322736] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f736ab6-c54d-4960-a3c5-88d5fa80cda1 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.330492] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2402.330711] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e172a4c-9212-4720-9653-34706bb074dc {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.332878] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2402.333064] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2402.333997] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a3d49a0-d23f-4ea1-9059-06783605fd12 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.338775] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2402.338775] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b82bf9-be68-cabd-37fc-4a394091552f" [ 2402.338775] env[68673]: _type = "Task" [ 2402.338775] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.345707] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]52b82bf9-be68-cabd-37fc-4a394091552f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.402093] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2402.402326] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2402.402506] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleting the datastore file [datastore1] 0dd8e45a-d77a-4c9b-a733-353fce754549 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2402.402763] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2ebbda7-4b72-4075-890b-bf967f0d06f4 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.408642] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for the task: (returnval){ [ 2402.408642] env[68673]: value = "task-3433652" [ 2402.408642] env[68673]: _type = "Task" [ 2402.408642] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.415903] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433652, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.784928] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2402.785159] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2402.785286] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2402.806980] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807246] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Skipping network cache update for instance because it is Building. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807283] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807394] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807566] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807699] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807820] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.807937] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.808065] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.808184] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2402.808300] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. 
{{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2402.849121] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2402.849378] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating directory with path [datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2402.849606] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4028038c-2dc7-4684-8e20-7aa6d63194ba {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.861090] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Created directory with path [datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2402.861286] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Fetch image to [datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2402.861460] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2402.862180] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bb5d95-dad5-484f-8773-46c548004598 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.868764] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076e443e-b038-46c5-be68-9def7cde1919 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.877642] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685d948b-a9c8-4956-86b2-b13c8a0d8837 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.907857] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569fd153-30b3-41c5-80eb-96805c98bd96 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.918902] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-12efcff7-6203-4c83-a7da-06498e1de701 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.920595] env[68673]: DEBUG oslo_vmware.api [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Task: {'id': task-3433652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063999} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.920826] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2402.921009] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2402.921186] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2402.921355] env[68673]: INFO nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Took 0.60 seconds to destroy the instance on the hypervisor. 
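The fetch sequence above (Preparing fetch location, MakeDirectory, AcquireGenericServiceTicket, then an HTTP write handle) streams the Glance image straight onto the datastore through the ESX host's /folder endpoint. Below is a minimal, stdlib-only sketch of that upload, with the host, path, and size taken from these records; the generic-service-ticket authentication is reduced to a placeholder cookie, so this illustrates the wire pattern, not oslo_vmware.rw_handles itself:

    import http.client

    HOST = "esx7c2n3.openstack.eu-de-1.cloud.sap"
    PATH = ("/folder/vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/"
            "7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk"
            "?dcPath=ha-datacenter&dsName=datastore1")
    FILE_SIZE = 21318656  # bytes, as logged by _create_write_connection

    def upload(image_chunks, ticket_cookie):
        # The write connection the log calls "Creating HTTP connection to
        # write to file"; certificate handling elided for brevity.
        conn = http.client.HTTPSConnection(HOST, 443)
        conn.putrequest("PUT", PATH)
        conn.putheader("Cookie", ticket_cookie)          # simplified auth
        conn.putheader("Content-Length", str(FILE_SIZE))
        conn.endheaders()
        for chunk in image_chunks:   # "reading data from the image iterator"
            conn.send(chunk)
        # Closing the handle reads the server's response; later in this log
        # that read surfaces as http.client.RemoteDisconnected.
        return conn.getresponse()
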
[ 2402.923452] env[68673]: DEBUG nova.compute.claims [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2402.923646] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2402.923859] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2402.943810] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2402.996817] env[68673]: DEBUG oslo_vmware.rw_handles [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2403.057645] env[68673]: DEBUG oslo_vmware.rw_handles [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2403.057859] env[68673]: DEBUG oslo_vmware.rw_handles [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2403.059583] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Refreshing inventories for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2403.074809] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Updating ProviderTree inventory for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2403.075036] env[68673]: DEBUG nova.compute.provider_tree [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Updating inventory in ProviderTree for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2403.085761] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Refreshing aggregate associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, aggregates: None {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2403.102641] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Refreshing trait associations for resource provider fd6b1504-0fb7-49fe-8051-ab853a390b4e, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68673) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2403.219284] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defe8fd1-4baa-4147-b6d3-e121feba5c8b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.226950] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec1c7cf-5afb-4f7d-9084-2eb278306362 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.255856] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df3e6c-12a7-4b5d-b557-e8026453ef27 
{{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.262385] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e406746-517a-4c46-ac10-548b95da7386 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.274807] env[68673]: DEBUG nova.compute.provider_tree [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2403.284409] env[68673]: DEBUG nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2403.300325] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.376s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.300853] env[68673]: ERROR nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2403.300853] env[68673]: Faults: ['InvalidArgument'] [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Traceback (most recent call last): [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self.driver.spawn(context, instance, image_meta, [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self._fetch_image_if_missing(context, vi) [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 
0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] image_cache(vi, tmp_image_ds_loc) [ 2403.300853] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] vm_util.copy_virtual_disk( [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] session._wait_for_task(vmdk_copy_task) [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return self.wait_for_task(task_ref) [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return evt.wait() [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] result = hub.switch() [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] return self.greenlet.switch() [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2403.301198] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] self.f(*self.args, **self.kw) [ 2403.301653] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2403.301653] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] raise exceptions.translate_fault(task_info.error) [ 2403.301653] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2403.301653] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Faults: ['InvalidArgument'] [ 2403.301653] env[68673]: ERROR nova.compute.manager [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] [ 2403.301653] env[68673]: DEBUG nova.compute.utils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 
0dd8e45a-d77a-4c9b-a733-353fce754549] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2403.302994] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Build of instance 0dd8e45a-d77a-4c9b-a733-353fce754549 was re-scheduled: A specified parameter was not correct: fileType [ 2403.302994] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2403.303394] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2403.303572] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2403.303747] env[68673]: DEBUG nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2403.303914] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2403.639872] env[68673]: DEBUG nova.network.neutron [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2403.652020] env[68673]: INFO nova.compute.manager [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Took 0.35 seconds to deallocate network for instance. 
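The traceback above shows the control flow around the failing CopyVirtualDisk_Task: wait_for_task parks the calling greenthread on an event while a looping call polls the vCenter task, and when the task reports an error state the poller translates the VIM fault ('InvalidArgument' on fileType) into a VimFaultException, which unwinds through spawn() and triggers the claim abort and reschedule recorded above. A condensed, synchronous sketch of that poll-and-translate loop follows; the names and dict shape are illustrative, not oslo.vmware's actual signatures:

    import time

    class VimFaultException(Exception):
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list      # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, interval=0.5):
        # Stand-in for the eventlet looping call driving _poll_task.
        while True:
            info = get_task_info()            # one PropertyCollector round-trip
            if info["state"] == "success":
                return info["result"]
            if info["state"] == "error":
                # translate_fault() maps the VIM fault to a Python exception;
                # "A specified parameter was not correct: fileType" ends here.
                raise VimFaultException(info["faults"], info["message"])
            time.sleep(interval)              # between the "progress is N%" lines
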
[ 2403.745308] env[68673]: INFO nova.scheduler.client.report [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Deleted allocations for instance 0dd8e45a-d77a-4c9b-a733-353fce754549 [ 2403.775267] env[68673]: DEBUG oslo_concurrency.lockutils [None req-95ca5fea-de87-4483-948f-99f004c70b86 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.839s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.775574] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.662s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.775810] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Acquiring lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.776029] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.776201] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.779847] env[68673]: INFO nova.compute.manager [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Terminating instance [ 2403.781655] env[68673]: DEBUG nova.compute.manager [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Start destroying the instance on the hypervisor. 
{{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2403.781846] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2403.782113] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bb41961-94d5-4b64-bef8-845270cfd964 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.790517] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b85f21e-e279-437b-869d-1695ba7aaa3e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.820653] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0dd8e45a-d77a-4c9b-a733-353fce754549 could not be found. [ 2403.820966] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2403.821058] env[68673]: INFO nova.compute.manager [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2403.821292] env[68673]: DEBUG oslo.service.loopingcall [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2403.821526] env[68673]: DEBUG nova.compute.manager [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2403.821526] env[68673]: DEBUG nova.network.neutron [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2403.849073] env[68673]: DEBUG nova.network.neutron [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2403.856740] env[68673]: INFO nova.compute.manager [-] [instance: 0dd8e45a-d77a-4c9b-a733-353fce754549] Took 0.04 seconds to deallocate network for instance. 
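Every Acquiring/acquired/released triple in these records comes from the lockutils wrapper timing its critical section: how long the caller waited to get the lock (391.662s for the terminate path above, queued behind the build lock held for 587.839s) and how long it then held it. A stdlib re-creation of that bookkeeping, for illustration only; the real wrapper is the inner() referenced in the lockutils.py source locations:

    import contextlib
    import threading
    import time

    _locks = {}  # one named lock per resource, e.g. "compute_resources"

    @contextlib.contextmanager
    def timed_lock(name, caller):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    # usage:
    # with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
    #     ...critical section...
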
[ 2403.938167] env[68673]: DEBUG oslo_concurrency.lockutils [None req-9818b295-a244-4605-981c-6965a57ea620 tempest-ImagesTestJSON-965171807 tempest-ImagesTestJSON-965171807-project-member] Lock "0dd8e45a-d77a-4c9b-a733-353fce754549" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.783485] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2404.783785] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.784933] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.784933] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.795689] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.795946] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.796176] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.796381] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68673) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2405.797881] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d77d88e-5be1-4fb0-9ddf-1b2b7b084e63 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.806584] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0cb094-080b-4814-88cc-f4d826ab32f3 {{(pid=68673) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.821325] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed77614a-f4d1-4847-b74e-7b45ff232d29 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.827541] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25db438d-5d34-4a81-ba8b-3d042ef40a39 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.857761] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180863MB free_disk=98GB free_vcpus=48 pci_devices=None {{(pid=68673) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2405.857973] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.858141] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.925063] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925236] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 07f2fc85-14df-4702-bf49-67d8ce8e9526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925364] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 85ee6b4a-3c96-4be5-81d2-8b3ca661924e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925486] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925605] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 3277e704-c970-4482-a812-f02e297f99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925721] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 643c6d20-b3b9-440d-82f2-7c09a609717d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925835] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance f7b5663b-1c63-47d9-85dc-59a47a82d5b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.925947] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.926079] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Instance 2ee49144-b168-42bf-8807-797bbe12839e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68673) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2405.926277] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2405.926414] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68673) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2406.047064] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbbe9f1-03e6-4ed2-aee3-a55ded71233f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.054710] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29879270-d2f8-453b-948e-a178152e1912 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.083811] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7409f3be-d9b8-4a51-97e2-7ceb09d6481e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.090784] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b916755a-f204-46da-a7bb-d8328ba6c7b6 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.103649] env[68673]: DEBUG nova.compute.provider_tree [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2406.112465] env[68673]: DEBUG nova.scheduler.client.report [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2406.126078] env[68673]: DEBUG nova.compute.resource_tracker [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68673) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2406.126284] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.121995] env[68673]: DEBUG oslo_service.periodic_task [None 
req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2408.783605] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2410.763242] env[68673]: DEBUG oslo_concurrency.lockutils [None req-271907af-c6e6-4628-9ec5-3631e9e86125 tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.826966] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d210c838-ca40-41e3-bd06-5b9094fd2e01 tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "643c6d20-b3b9-440d-82f2-7c09a609717d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.893449] env[68673]: DEBUG oslo_concurrency.lockutils [None req-546f6533-0a09-4bf8-a972-ebe0d3dc2496 tempest-ListServersNegativeTestJSON-1652088483 tempest-ListServersNegativeTestJSON-1652088483-project-member] Acquiring lock "3277e704-c970-4482-a812-f02e297f99d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.093982] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.094492] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){ [ 2412.094492] env[68673]: value = "domain-c8" [ 2412.094492] env[68673]: _type = "ClusterComputeResource" [ 2412.094492] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2412.095610] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c01dba-4453-42fb-9a72-7e43fd019d8b {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.112224] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 9 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2413.783841] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.792046] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task 
ComputeManager._run_pending_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.792379] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2421.802789] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] There are 0 instances to clean {{(pid=68673) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2421.803028] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.803171] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Cleaning up deleted instances with incomplete migration {{(pid=68673) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2449.092063] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.115364] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Getting list of instances from cluster (obj){ [ 2449.115364] env[68673]: value = "domain-c8" [ 2449.115364] env[68673]: _type = "ClusterComputeResource" [ 2449.115364] env[68673]: } {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2449.115364] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2d1fba-e639-4a56-b783-11a6ab85b217 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.131461] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Got total of 9 instances {{(pid=68673) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2449.131640] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid ec4370aa-2353-4f9c-82c7-5707e3337c94 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.131833] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 07f2fc85-14df-4702-bf49-67d8ce8e9526 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.132073] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 85ee6b4a-3c96-4be5-81d2-8b3ca661924e {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.132258] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 4000d9e1-c566-4b4f-be56-eacaafa0a0a1 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.132416] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 3277e704-c970-4482-a812-f02e297f99d1 {{(pid=68673) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.132561] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 643c6d20-b3b9-440d-82f2-7c09a609717d {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.136077] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid f7b5663b-1c63-47d9-85dc-59a47a82d5b9 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.136077] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7 {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.136077] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Triggering sync for uuid 2ee49144-b168-42bf-8807-797bbe12839e {{(pid=68673) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2449.136077] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136077] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "07f2fc85-14df-4702-bf49-67d8ce8e9526" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136236] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "85ee6b4a-3c96-4be5-81d2-8b3ca661924e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136236] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "4000d9e1-c566-4b4f-be56-eacaafa0a0a1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136236] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "3277e704-c970-4482-a812-f02e297f99d1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136236] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "643c6d20-b3b9-440d-82f2-7c09a609717d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136345] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "f7b5663b-1c63-47d9-85dc-59a47a82d5b9" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136345] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "8f083884-b49e-4b55-a26d-7fcbb4bb6ae7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.136345] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Acquiring lock "2ee49144-b168-42bf-8807-797bbe12839e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2453.084279] env[68673]: WARNING oslo_vmware.rw_handles [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles response.begin() [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2453.084279] env[68673]: ERROR oslo_vmware.rw_handles [ 2453.084893] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Downloaded image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2453.086910] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Caching image {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2453.087315] env[68673]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Copying Virtual Disk 
[datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk to [datastore1] vmware_temp/897cd4b6-bd20-4fc9-95da-e65908cd7799/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk {{(pid=68673) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2453.087653] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e4c3629-8621-4774-b707-05b672a69999 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.096189] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){ [ 2453.096189] env[68673]: value = "task-3433653" [ 2453.096189] env[68673]: _type = "Task" [ 2453.096189] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.104592] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.605988] env[68673]: DEBUG oslo_vmware.exceptions [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Fault InvalidArgument not matched. {{(pid=68673) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2453.606309] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2453.606896] env[68673]: ERROR nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2453.606896] env[68673]: Faults: ['InvalidArgument'] [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Traceback (most recent call last): [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] yield resources [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self.driver.spawn(context, instance, image_meta, [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self._fetch_image_if_missing(context, vi) [ 2453.606896] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] image_cache(vi, tmp_image_ds_loc) [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] vm_util.copy_virtual_disk( [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] session._wait_for_task(vmdk_copy_task) [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return self.wait_for_task(task_ref) [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return evt.wait() [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] result = hub.switch() [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2453.607224] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return self.greenlet.switch() [ 2453.607614] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2453.607614] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self.f(*self.args, **self.kw) [ 2453.607614] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2453.607614] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] raise exceptions.translate_fault(task_info.error) [ 2453.607614] env[68673]: ERROR nova.compute.manager [instance: 
[ 2453.607614] env[68673]: INFO nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Terminating instance
[ 2453.608783] env[68673]: DEBUG oslo_concurrency.lockutils [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7da4e48b-416f-425b-b73b-3305c69c87ef/7da4e48b-416f-425b-b73b-3305c69c87ef.vmdk" {{(pid=68673) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2453.608985] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2453.609231] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4278c571-5db6-428c-aca5-ebcfa0f7333f {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2453.611364] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2453.611553] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2453.612266] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ff6d8a-31ac-4ffd-8801-90d4a95dc7ea {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2453.618926] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Unregistering the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2453.619148] env[68673]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d99e5bd4-c465-4e88-b94e-4e47085aac57 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2453.621228] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2453.621401] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68673) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2453.622382] env[68673]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-094f1a1e-99c7-4186-b19b-c936beb03e72 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2453.626849] env[68673]: DEBUG oslo_vmware.api [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Waiting for the task: (returnval){
[ 2453.626849] env[68673]: value = "session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5226d405-e5b3-80c3-a626-d8898d929cac"
[ 2453.626849] env[68673]: _type = "Task"
[ 2453.626849] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2453.634387] env[68673]: DEBUG oslo_vmware.api [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Task: {'id': session[52687a4b-da6b-fa33-6c5b-294f4e3845fb]5226d405-e5b3-80c3-a626-d8898d929cac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
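[Annotation] The "Waiting for the task ... progress is 0%." pair above comes from oslo.vmware's wait_for_task/_poll_task polling loop. A minimal, self-contained sketch of that pattern; the real version runs the poll inside a looping call on an eventlet hub, and the task-info shape here is illustrative:

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info performs one polling round trip, e.g. the
        # PropertyCollector.RetrievePropertiesEx calls seen in this log.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # The "progress is 0%." lines are logged between iterations.
            time.sleep(interval)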
[ 2453.683378] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Unregistered the VM {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2453.683696] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Deleting contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2453.683881] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleting the datastore file [datastore1] ec4370aa-2353-4f9c-82c7-5707e3337c94 {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2453.684162] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18572541-7f95-4201-837d-38dd60e2bb81 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2453.690411] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for the task: (returnval){
[ 2453.690411] env[68673]: value = "task-3433655"
[ 2453.690411] env[68673]: _type = "Task"
[ 2453.690411] env[68673]: } to complete. {{(pid=68673) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2453.698533] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433655, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2454.137249] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Preparing fetch location {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2454.137557] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating directory with path [datastore1] vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2454.137753] env[68673]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c750f41-b5ed-4388-a98f-9b6760f4d7b7 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.150319] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Created directory with path [datastore1] vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef {{(pid=68673) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2454.150319] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Fetch image to [datastore1] vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk {{(pid=68673) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2454.150319] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to [datastore1] vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk on the data store datastore1 {{(pid=68673) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2454.151304] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62452de-e2a2-45d0-8275-e5d2b31eb1d8 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.157357] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ccc981-9104-43a7-ab1d-af2afa19f20d {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.166116] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed38907-e9ce-46f1-bcb5-9f53128b6705 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.199646] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7bf0b7-3449-46d6-848f-9e20abf82eee {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.206656] env[68673]: DEBUG oslo_vmware.api [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Task: {'id': task-3433655, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071449} completed successfully. {{(pid=68673) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2454.208062] env[68673]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted the datastore file {{(pid=68673) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2454.208256] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Deleted contents of the VM from datastore datastore1 {{(pid=68673) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2454.208431] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2454.208603] env[68673]: INFO nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 2454.210347] env[68673]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-942fb641-1b9d-41f6-837d-a91ae1141a39 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.212245] env[68673]: DEBUG nova.compute.claims [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Aborting claim: {{(pid=68673) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2454.212412] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2454.212618] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
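[Annotation] The "Acquiring lock"/"acquired" pair above (and the matching "released" later in this log) is oslo.concurrency's in-process locking. lockutils.synchronized is the real decorator; the function body below is an illustrative stand-in, not Nova's resource tracker code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Runs with the "compute_resources" semaphore held; lockutils
        # logs the waited/held durations shown in this log.
        pass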
[ 2454.233639] env[68673]: DEBUG nova.virt.vmwareapi.images [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Downloading image file data 7da4e48b-416f-425b-b73b-3305c69c87ef to the data store datastore1 {{(pid=68673) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2454.368548] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a00e187-8039-4226-9fc4-e69c3ba0abd2 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.372308] env[68673]: DEBUG oslo_vmware.rw_handles [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2454.431178] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa839c88-1d37-4b38-a210-e35cda36eb66 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.436356] env[68673]: DEBUG oslo_vmware.rw_handles [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Completed reading data from the image iterator. {{(pid=68673) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
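[Annotation] The rw_handles lines above stream the image data to the ESX host's HTTPS /folder endpoint. Nova uses oslo_vmware.rw_handles with a session ticket for this; the requests-based stand-in below is an assumption made for clarity, not the driver's code:

    import requests

    def upload_vmdk(url, image_iter, cookie):
        # url has the shape seen above:
        # https://<esx-host>:443/folder/<path>?dcPath=ha-datacenter&dsName=datastore1
        resp = requests.put(
            url,
            data=image_iter,  # generator of image chunks, streamed to the host
            headers={'Content-Type': 'application/octet-stream',
                     'Cookie': cookie},
            verify=False)  # CI-lab setting; verify certificates in production
        resp.raise_for_status()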
[ 2454.436532] env[68673]: DEBUG oslo_vmware.rw_handles [None req-f55d08dd-eb48-4241-b45e-77000a8bad97 tempest-AttachInterfacesTestJSON-251783821 tempest-AttachInterfacesTestJSON-251783821-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ad5e453-0969-4526-ba38-06e4dee39f34/7da4e48b-416f-425b-b73b-3305c69c87ef/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68673) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2454.464318] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c234e845-e930-4163-a97e-7f84ac0b7a89 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.471389] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b666d303-55be-4833-a051-b131d851ea26 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.484253] env[68673]: DEBUG nova.compute.provider_tree [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed in ProviderTree for provider: fd6b1504-0fb7-49fe-8051-ab853a390b4e {{(pid=68673) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2454.493010] env[68673]: DEBUG nova.scheduler.client.report [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Inventory has not changed for provider fd6b1504-0fb7-49fe-8051-ab853a390b4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 98, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68673) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2454.505919] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2454.506442] env[68673]: ERROR nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2454.506442] env[68673]: Faults: ['InvalidArgument']
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Traceback (most recent call last):
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self.driver.spawn(context, instance, image_meta,
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self._fetch_image_if_missing(context, vi)
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] image_cache(vi, tmp_image_ds_loc)
[ 2454.506442] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] vm_util.copy_virtual_disk(
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] session._wait_for_task(vmdk_copy_task)
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return self.wait_for_task(task_ref)
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return evt.wait()
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] result = hub.switch()
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] return self.greenlet.switch()
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2454.506785] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] self.f(*self.args, **self.kw)
[ 2454.507122] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2454.507122] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] raise exceptions.translate_fault(task_info.error)
[ 2454.507122] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2454.507122] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Faults: ['InvalidArgument']
[ 2454.507122] env[68673]: ERROR nova.compute.manager [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94]
[ 2454.507122] env[68673]: DEBUG nova.compute.utils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] VimFaultException {{(pid=68673) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2454.508817] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Build of instance ec4370aa-2353-4f9c-82c7-5707e3337c94 was re-scheduled: A specified parameter was not correct: fileType
[ 2454.508817] env[68673]: Faults: ['InvalidArgument'] {{(pid=68673) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2454.509200] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Unplugging VIFs for instance {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2454.509371] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68673) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2454.509541] env[68673]: DEBUG nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2454.509705] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2454.815164] env[68673]: DEBUG nova.network.neutron [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2454.826350] env[68673]: INFO nova.compute.manager [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Took 0.32 seconds to deallocate network for instance.
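[Annotation] The "Virt driver does not provide unplug_vifs method" line above reflects cleanup treating VIF unplugging as an optional driver capability. One plausible shape of that guard, simplified from the behavior the log describes (not Nova's actual _cleanup_allocated_networks):

    def cleanup_allocated_networks(driver, instance, network_info):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            # The driver does not implement the optional hook, so whether
            # the VIFs should be unplugged cannot be determined; skip.
            pass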
[ 2454.923033] env[68673]: INFO nova.scheduler.client.report [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Deleted allocations for instance ec4370aa-2353-4f9c-82c7-5707e3337c94
[ 2454.943158] env[68673]: DEBUG oslo_concurrency.lockutils [None req-d7d23921-9e49-4d20-bf42-ea852d8a1fa7 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 517.028s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2454.943334] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 321.573s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2454.943570] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Acquiring lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2454.943783] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2454.943954] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2454.946361] env[68673]: INFO nova.compute.manager [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Terminating instance
[ 2454.948441] env[68673]: DEBUG nova.compute.manager [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Start destroying the instance on the hypervisor. {{(pid=68673) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2454.948653] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Destroying instance {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2454.948915] env[68673]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a9e0975-8bd6-4c2b-b5eb-d0fdeecf2a82 {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.958544] env[68673]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca394484-5bdd-4259-bde2-78f19d482b9e {{(pid=68673) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2454.986969] env[68673]: WARNING nova.virt.vmwareapi.vmops [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec4370aa-2353-4f9c-82c7-5707e3337c94 could not be found.
[ 2454.987197] env[68673]: DEBUG nova.virt.vmwareapi.vmops [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Instance destroyed {{(pid=68673) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2454.987378] env[68673]: INFO nova.compute.manager [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2454.987653] env[68673]: DEBUG oslo.service.loopingcall [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68673) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2454.987885] env[68673]: DEBUG nova.compute.manager [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Deallocating network for instance {{(pid=68673) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2454.987982] env[68673]: DEBUG nova.network.neutron [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] deallocate_for_instance() {{(pid=68673) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2455.012325] env[68673]: DEBUG nova.network.neutron [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Updating instance_info_cache with network_info: [] {{(pid=68673) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2455.020509] env[68673]: INFO nova.compute.manager [-] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] Took 0.03 seconds to deallocate network for instance.
[ 2455.116602] env[68673]: DEBUG oslo_concurrency.lockutils [None req-51ca2acf-8ffe-464d-993d-40a180c57a13 tempest-ServersTestJSON-1463022821 tempest-ServersTestJSON-1463022821-project-member] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.173s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2455.117309] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 5.984s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2455.117552] env[68673]: INFO nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: ec4370aa-2353-4f9c-82c7-5707e3337c94] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2455.117731] env[68673]: DEBUG oslo_concurrency.lockutils [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Lock "ec4370aa-2353-4f9c-82c7-5707e3337c94" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68673) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
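[Annotation] The "During sync_power_state the instance has a pending task (deleting). Skip." line above shows the periodic power-state sync yielding to an in-flight operation. A simplified stand-in for that guard (not Nova's actual _sync_power_states):

    def query_driver_power_state_and_sync(instance, driver_power_state):
        if instance.task_state is not None:
            # A task such as 'deleting' owns the instance right now;
            # syncing power state could race with it, so skip.
            return
        # ...otherwise reconcile the stored power state with
        # driver_power_state...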
[ 2461.783008] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2461.783464] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2461.783507] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2461.783643] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68673) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
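[Annotation] The "Running periodic task ComputeManager._..." lines above are emitted by oslo.service's periodic task runner. A hedged sketch of the wiring: the decorator and base class are the real oslo.service API, but the method bodies, the spacing value, and the guard below are illustrative, not Nova's defaults:

    from oslo_service import periodic_task

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            pass  # body elided; the runner logs "Running periodic task ..."

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            reclaim_instance_interval = 0  # stand-in for CONF.reclaim_instance_interval
            if reclaim_instance_interval <= 0:
                return  # matches "CONF.reclaim_instance_interval <= 0, skipping..."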
[ 2464.780208] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2464.782886] env[68673]: DEBUG oslo_service.periodic_task [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68673) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2464.783127] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Starting heal instance info cache {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 2464.783262] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Rebuilding the list of instances to heal {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 2464.802516] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 07f2fc85-14df-4702-bf49-67d8ce8e9526] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.802674] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 85ee6b4a-3c96-4be5-81d2-8b3ca661924e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.802803] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 4000d9e1-c566-4b4f-be56-eacaafa0a0a1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.802927] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 3277e704-c970-4482-a812-f02e297f99d1] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.803068] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 643c6d20-b3b9-440d-82f2-7c09a609717d] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.803195] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: f7b5663b-1c63-47d9-85dc-59a47a82d5b9] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.803315] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 8f083884-b49e-4b55-a26d-7fcbb4bb6ae7] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.803434] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] [instance: 2ee49144-b168-42bf-8807-797bbe12839e] Skipping network cache update for instance because it is Building. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2464.803555] env[68673]: DEBUG nova.compute.manager [None req-e776eecf-ec80-415e-bbd1-f10131fd1827 None None] Didn't find any instances for network info cache update. {{(pid=68673) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
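[Annotation] The run of "Skipping network cache update for instance because it is Building." lines above is the heal task filtering out instances that are still building. A simplified sketch of that filter (not Nova's actual _heal_instance_info_cache):

    def instances_to_heal(instances):
        for instance in instances:
            if instance.vm_state == 'building':
                # The cache is (re)built when the instance finishes building.
                continue
            yield instance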